天天看點

UITextView實作圖檔、視訊和表情的插入與删除。

表情是特殊的串。詳細使用見

​《表情包元件》。

通過UITextView插入NSTextAttachment來顯示圖檔。很不幸NSTextAttachment隻能展示圖檔,不能展示視訊。可以通過類繼承來實作把視訊連結存入它的子類,展示隻能按照視訊的首幀圖展示。

視訊和圖檔的上傳可以通過第三方元件LFImagePickerController選擇視訊和圖檔,視訊可以拿到視訊的首幀圖和NSData類型的視訊資料。

然後把視訊或圖像上傳伺服器,伺服器傳回它的位址。

最後解析UITextView的attributedText,組裝h5格式字元串發送給背景。

至于在其它用戶端的展示隻能使用标準h5元件WKWebView來展示了,這樣視訊、圖檔、文字都能正常展示。這個下一篇文章介紹,防止文章又臭又長。

注意:咱們常用的TZImagePickerController的demo無法拿到視訊資料流,隻能拿到PHAsset,無法拿到雲上的視訊,我折騰了半天通過PHAsset也沒有拿到視訊流,若你牛你可以自己繼續探索,反正我是放棄了。别說些理論上的東西或網上查出的東西,我隻相信實踐是檢驗真理的唯一标準,自己實際搞出來才是真的正确。其實有很多技術就隔一層窗戶紙,但是哪怕差一個屬性設定。若不知道導緻不能真正實作也是白搭。有現成的LFImagePickerController幫你搞定一切,不用你為這絞盡腦汁。

DYTextAttachment.h檔案

#import <UIKit/UIKit.h>

/// Kind of content a DYTextAttachment represents inside the text view.
typedef NS_ENUM(NSInteger, DYFileType)
{
    DYFileTypeText = 0,//plain text, no attachment payload
    DYFileTypeImage = 1,//image attachment
    DYFileTypeVideo = 2 //video attachment, rendered via its cover frame image
};

/// NSTextAttachment subclass that can additionally represent a video
/// (NSTextAttachment itself can only display an image, so a video is shown
/// via its cover frame while the server path/type live on these properties).
@interface DYTextAttachment : NSTextAttachment

/// Tag string identifying the emoji this attachment renders (emoji use case).
/// `copy` (not `strong`): NSString has a mutable subclass — defend against aliasing.
@property(copy, nonatomic) NSString *emojiTag;

/// Display size used by -attachmentBoundsForTextContainer:... For emoji image size.
@property(assign, nonatomic) CGSize emojiSize;

/// YES when this attachment represents a video.
@property (nonatomic, assign) BOOL isVideo;

/// Raw input string associated with this attachment, if any.
@property(copy, nonatomic) NSString *inputStr;

/// Server-side file path (URL) returned after uploading the image/video data.
@property(copy, nonatomic) NSString *dataFilePath;

/// Content kind represented by this attachment (text / image / video).
@property(nonatomic, assign) DYFileType dyFileType;

@end

DYTextAttachment.m檔案

#import "DYTextAttachment.h"

@implementation DYTextAttachment

/// Lays out the attachment using the explicitly-set emojiSize.
/// Fix: the original unconditionally returned a rect built from _emojiSize,
/// which collapses the attachment to a zero rect whenever emojiSize was never
/// assigned. Fall back to the attached image's natural size in that case.
- (CGRect)attachmentBoundsForTextContainer:(NSTextContainer *)textContainer proposedLineFragment:(CGRect)lineFrag glyphPosition:(CGPoint)position characterIndex:(NSUInteger)charIndex {
    CGSize size = _emojiSize;
    if (CGSizeEqualToSize(size, CGSizeZero)) {
        // emojiSize not set — use the image's own size so the glyph stays visible.
        size = self.image.size;
    }
    return CGRectMake(0, 0, size.width, size.height);
}

@end

擷取圖檔或視訊并組裝資料發送圖檔或視訊上傳

/// LFImagePickerController delegate callback.
/// For each picked result, stores the raw bytes and a preview image on
/// self.model, tags the content kind, and kicks off an upload.
/// Fixes vs. original: removed the stray `;` before the method body, a dead
/// shadowed loop counter (`NSInteger i = 0` inside the loop), an unused
/// `images` array and an unused `flag`, and leftover debug NSLogs.
- (void)lf_imagePickerController:(LFImagePickerController *)picker didFinishPickingResult:(NSArray <LFResultObject /* <LFResultImage/LFResultVideo> */*> *)results
{
    // Ensure the local cache directories exist. Nothing in this method writes
    // to them directly — presumably later preview/download code does; keep the
    // side effect so that code still finds the directories. TODO(review): confirm.
    NSString *documentPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) firstObject];
    NSString *thumbnailFilePath = [documentPath stringByAppendingPathComponent:@"thumbnail"];
    NSString *originalFilePath = [documentPath stringByAppendingPathComponent:@"original"];

    NSFileManager *fileManager = [NSFileManager new];
    if (![fileManager fileExistsAtPath:thumbnailFilePath])
    {
        [fileManager createDirectoryAtPath:thumbnailFilePath withIntermediateDirectories:YES attributes:nil error:nil];
    }
    if (![fileManager fileExistsAtPath:originalFilePath])
    {
        [fileManager createDirectoryAtPath:originalFilePath withIntermediateDirectories:YES attributes:nil error:nil];
    }

    for (LFResultObject *result in results) {
        if ([result isKindOfClass:[LFResultImage class]]) {
            LFResultImage *resultImage = (LFResultImage *)result;
            if (resultImage.originalImage)
            {
                // Upload the original image bytes; show the original image inline.
                self.model.imageData = resultImage.originalData;
                self.model.image = resultImage.originalImage;
                self.model.dyFileType = DYFileTypeImage;
                [self excuteQA_UploadCommand];
            }
        } else if ([result isKindOfClass:[LFResultVideo class]]) {
            LFResultVideo *resultVideo = (LFResultVideo *)result;
            if (resultVideo.data)
            {
                // Upload the raw video bytes; display a width-capped cover frame.
                self.model.imageData = resultVideo.data;
                self.model.image = [self getCompressImageWithImage:resultVideo.coverImage];
                self.model.dyFileType = DYFileTypeVideo;
                [self excuteQA_UploadCommand];
            }
        } else {
            // Result type we cannot handle — surface its error.
            NSLog(@"%@", result.error);
        }
    }
}
/// Uploads self.model.imageData (image or video bytes) to the QA_Upload
/// endpoint. On success, builds a DYTextAttachment from the server-returned
/// file path and the local preview image, and inserts it at the caret of
/// self.inputTextView.
/// Fixes vs. original: `bounds` was assigned twice with the same value;
/// endpoint URL extracted to a named constant; dead commented code removed;
/// unused @strongify in the failure block removed.
/// NOTE(review): method name keeps its original (misspelled) selector —
/// callers elsewhere depend on `excuteQA_UploadCommand`.
-(void)excuteQA_UploadCommand
{
    static NSString * const kQAUploadURL = @"http://huaxin.jtaqkt.com/MApi/StudyApi.asmx/QA_Upload";

    @weakify(self);

    NSMutableDictionary *dict = [NSMutableDictionary dictionary];
    [dict setObject:[DataStore sharedDataStore].UID forKey:@"UID"];

    [BITLoadingView show];
    [[NetWorkEngine shareNetWorkEngine] postDataFromServerWithUrlStr:kQAUploadURL Paremeters:dict isPhoto:(self.model.dyFileType == DYFileTypeImage) data:self.model.imageData successOperation:^(id response) {
        @strongify(self);
        [BITLoadingView hide];
        CBPShopHomeResultHeadEntity *head = [CBPShopHomeResultHeadEntity mj_objectWithKeyValues:response];
        if(1 == head.message) // 1 == success, per backend convention
        {
            if(head.data && [head.data isKindOfClass:[NSDictionary class]])
            {
                CBPShopHomeResultHeadEntity *payload = [CBPShopHomeResultHeadEntity mj_objectWithKeyValues:head.data];

                DYTextAttachment *textAttachment = [[DYTextAttachment alloc] init];
                textAttachment.dataFilePath = payload.FilePath; // server-side URL of the uploaded file
                textAttachment.image = self.model.image;
                textAttachment.dyFileType = self.model.dyFileType;

                // emojiSize drives -attachmentBoundsForTextContainer:...;
                // bounds is the standard NSTextAttachment sizing. Keep both
                // in sync with the preview image's size.
                CGSize imageSize = textAttachment.image.size;
                textAttachment.emojiSize = imageSize;
                textAttachment.bounds = CGRectMake(0, 0, imageSize.width, imageSize.height);

                // Insert at the caret, then advance the selection past the attachment.
                NSAttributedString *str = [NSAttributedString attributedStringWithAttachment:textAttachment];
                [self.inputTextView.textStorage insertAttributedString:str atIndex:self.inputTextView.selectedRange.location];
                self.inputTextView.selectedRange = NSMakeRange(self.inputTextView.selectedRange.location+1, 0);

                // Content now exists — enable the send button and hide the placeholder.
                [self.rightBarButton setTitleColor:BGColorHex(ED5058) forState:UIControlStateNormal];
                self.rightBarButton.userInteractionEnabled = YES;
                self.describeTitleLabel.hidden = YES;
            }
        }
        else
        {
            [[BITNoticeView currentNotice] showErrorNotice:head.messagestr];
        }
    } failoperation:^(NSError *error) {
        [BITLoadingView hide];
        [[BITNoticeView currentNotice] showErrorNotice:error.domain];
    }];
}
/// Returns `image` scaled down (aspect-preserving) so its width fits the text
/// view's content width, re-encoded as JPEG at quality 0.8; images already
/// narrow enough are returned unchanged.
/// Fix vs. original: the pass-through branch used to overwrite
/// self.model.imageData with the cover JPEG, clobbering the video bytes the
/// caller had just stored there (the resize branch deliberately did NOT do
/// this — its assignment was commented out). The helper is now a pure resize
/// function and never touches self.model.
-(UIImage *)getCompressImageWithImage:(UIImage *)image
{
    CGFloat maxWidth = FULL_WIDTH - COMMON_EDGE_DISTANCE * 2;
    if (image.size.width > maxWidth)
    {
        // Cap width; scale height by the same ratio to preserve aspect.
        CGFloat targetHeight = maxWidth * image.size.height / image.size.width;

        // Create a bitmap context, make it current, draw scaled, then pop it.
        UIGraphicsBeginImageContext(CGSizeMake(maxWidth, targetHeight));
        [image drawInRect:CGRectMake(0, 0, maxWidth, targetHeight)];
        UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
        UIGraphicsEndImageContext();

        // Round-trip through JPEG so the preview matches what would be uploaded.
        NSData *data = UIImageJPEGRepresentation(newImage, 0.8);
        return [UIImage imageWithData:data];
    }
    else
    {
        return image;
    }
}