天天看点

UITextView实现图片、视频和表情的插入与删除。

表情是特殊的字符串。详细使用见

​《表情包组件》。

通过UITextView插入NSTextAttachment来显示图片。很不幸NSTextAttachment只能展示图片,不能展示视频。可以通过类继承来实现把视频链接存入它的子类,展示只能按照视频的首帧图展示。

视频和图片的上传可以通过第三方组件LFImagePickerController来实现:选择视频或图片后,可以拿到视频的首帧图和NSData类型的视频数据。

然后把视频或图像上传服务器,服务器返回它的地址。

最后解析UITextView的attributedText,组装h5格式字符串发送给后台。

至于在其它客户端的展示,只能使用标准H5渲染组件WKWebView来展示了,这样视频、图片、文字都能正常展示。这个在下一篇文章介绍,防止文章又臭又长。

注意:咱们常用的TZImagePickerController的demo无法拿到视频数据流,只能拿到PHAsset,无法拿到云上的视频,我折腾了半天通过PHAsset也没有拿到视频流,若你牛你可以自己继续探索,反正我是放弃了。别说些理论上的东西或网上查出的东西,我只相信实践是检验真理的唯一标准,自己实际搞出来才是真的正确。其实有很多技术就隔一层窗户纸,但是哪怕差一个属性设置。若不知道导致不能真正实现也是白搭。有现成的LFImagePickerController帮你搞定一切,不用你为这绞尽脑汁。

DYTextAttachment.h文件

#import <UIKit/UIKit.h>

// Kind of content carried by a DYTextAttachment / upload model.
typedef NS_ENUM(NSInteger, DYFileType)
{
    DYFileTypeText = 0,// plain text, no attachment payload
    DYFileTypeImage = 1,// image attachment
    DYFileTypeVideo = 2 // video attachment (rendered via its cover image)
};

/// NSTextAttachment subclass that can represent an emoji, an image, or a
/// video (shown via its cover image) inside a UITextView.
/// NSString properties use `copy` (not `strong`) so a caller passing an
/// NSMutableString cannot mutate our state behind our back.
@interface DYTextAttachment : NSTextAttachment

/// Tag string identifying the emoji this attachment stands for.
@property (nonatomic, copy) NSString *emojiTag;

/// Display size used by -attachmentBoundsForTextContainer:... layout.
@property (nonatomic, assign) CGSize emojiSize;

/// YES when the attachment represents a video rather than a still image.
@property (nonatomic, assign) BOOL isVideo;

/// Raw input string associated with this attachment.
@property (nonatomic, copy) NSString *inputStr;

/// Server-side path of the uploaded media file.
@property (nonatomic, copy) NSString *dataFilePath;

/// Content kind (text / image / video) — see DYFileType.
@property (nonatomic, assign) DYFileType dyFileType;

@end

DYTextAttachment.m文件

#import "DYTextAttachment.h"

@implementation DYTextAttachment

/// Lays the attachment out using the explicitly assigned emojiSize rather
/// than the attached image's intrinsic size, so callers control the glyph
/// footprint independently of the bitmap dimensions.
- (CGRect)attachmentBoundsForTextContainer:(NSTextContainer *)textContainer
                      proposedLineFragment:(CGRect)lineFrag
                             glyphPosition:(CGPoint)position
                            characterIndex:(NSUInteger)charIndex {
    CGSize displaySize = self.emojiSize;
    return CGRectMake(0.0, 0.0, displaySize.width, displaySize.height);
}

@end

获取图片或视频并组装数据发送图片或视频上传

/// LFImagePickerController delegate callback. For each picked result,
/// stashes the media bytes + preview image + kind on self.model and kicks
/// off an upload. Fixes vs. original: removed the stray `;` between the
/// method signature and its body, removed dead locals (`images`, `flag`,
/// and an inner `NSInteger i` that shadowed the loop index), surfaced
/// directory-creation errors instead of passing error:nil, and dropped
/// leftover debug NSLogs.
- (void)lf_imagePickerController:(LFImagePickerController *)picker didFinishPickingResult:(NSArray <LFResultObject /* <LFResultImage/LFResultVideo> */*> *)results
{
    // Ensure the local cache directories exist.
    // NOTE(review): thumbnailFilePath/originalFilePath are not consumed in
    // this method — presumably used elsewhere; confirm before removing.
    NSString *documentPath = [NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES) firstObject];
    NSString *thumbnailFilePath = [documentPath stringByAppendingPathComponent:@"thumbnail"];
    NSString *originalFilePath = [documentPath stringByAppendingPathComponent:@"original"];

    NSFileManager *fileManager = [NSFileManager new];
    for (NSString *directoryPath in @[thumbnailFilePath, originalFilePath]) {
        if (![fileManager fileExistsAtPath:directoryPath]) {
            NSError *dirError = nil;
            if (![fileManager createDirectoryAtPath:directoryPath withIntermediateDirectories:YES attributes:nil error:&dirError]) {
                NSLog(@"Failed to create directory %@: %@", directoryPath, dirError);
            }
        }
    }

    for (LFResultObject *result in results) {
        if ([result isKindOfClass:[LFResultImage class]]) {
            LFResultImage *resultImage = (LFResultImage *)result;
            if (resultImage.originalImage) {
                self.model.imageData = resultImage.originalData;
                self.model.image = resultImage.originalImage;
                self.model.dyFileType = DYFileTypeImage;
                [self excuteQA_UploadCommand];
            }
        } else if ([result isKindOfClass:[LFResultVideo class]]) {
            LFResultVideo *resultVideo = (LFResultVideo *)result;
            if (resultVideo.data) {
                self.model.imageData = resultVideo.data;
                // Upload preview is the (possibly downscaled) cover frame.
                self.model.image = [self getCompressImageWithImage:resultVideo.coverImage];
                self.model.dyFileType = DYFileTypeVideo;
                [self excuteQA_UploadCommand];
            }
        } else {
            /** 无法处理的数据 */
            NSLog(@"%@", result.error);
        }
    }
}
/// Uploads self.model's media bytes to the QA_Upload endpoint; on success
/// wraps the returned file path in a DYTextAttachment and inserts it into
/// the text view at the caret.
/// Fixes vs. original: the duplicated `textAttachment.bounds = ...`
/// assignment is collapsed to one; the endpoint URL is a named constant;
/// the failure path shows `error.localizedDescription` (a human-readable
/// message) instead of `error.domain` (a reverse-DNS identifier).
/// NOTE(review): method name keeps the original "excute" misspelling so
/// existing callers/selectors keep working.
-(void)excuteQA_UploadCommand
{
    // Q&A media upload endpoint; the response carries the stored FilePath.
    static NSString * const kQAUploadURLString = @"http://huaxin.jtaqkt.com/MApi/StudyApi.asmx/QA_Upload";

    @weakify(self);

    NSMutableDictionary *dict = [NSMutableDictionary dictionary];
    [dict setObject:[DataStore sharedDataStore].UID forKey:@"UID"];

    [BITLoadingView show];
    [[NetWorkEngine shareNetWorkEngine] postDataFromServerWithUrlStr:kQAUploadURLString Paremeters:dict isPhoto:(self.model.dyFileType == DYFileTypeImage) data:self.model.imageData successOperation:^(id response) {
        @strongify(self);
        [BITLoadingView hide];
        CBPShopHomeResultHeadEntity *head = [CBPShopHomeResultHeadEntity mj_objectWithKeyValues:response];
        if (1 == head.message)
        {
            if (head.data && [head.data isKindOfClass:[NSDictionary class]])
            {
                CBPShopHomeResultHeadEntity *payload = [CBPShopHomeResultHeadEntity mj_objectWithKeyValues:head.data];
                DYTextAttachment *textAttachment = [[DYTextAttachment alloc] init];
                textAttachment.dataFilePath = payload.FilePath;
                textAttachment.image = self.model.image;

                // Size the attachment to the preview image's point size.
                CGSize imageSize = textAttachment.image.size;
                textAttachment.emojiSize = imageSize;
                textAttachment.bounds = CGRectMake(0, 0, imageSize.width, imageSize.height);
                textAttachment.dyFileType = self.model.dyFileType;

                NSAttributedString *attachmentString = [NSAttributedString attributedStringWithAttachment:textAttachment];
                // Insert at the caret, then advance the caret past the
                // attachment (an attachment occupies one character).
                [self.inputTextView.textStorage insertAttributedString:attachmentString atIndex:self.inputTextView.selectedRange.location];
                self.inputTextView.selectedRange = NSMakeRange(self.inputTextView.selectedRange.location + 1, 0);

                [self.rightBarButton setTitleColor:BGColorHex(ED5058) forState:UIControlStateNormal];
                self.rightBarButton.userInteractionEnabled = YES;
                self.describeTitleLabel.hidden = YES;
            }
        }
        else
        {
            [[BITNoticeView currentNotice] showErrorNotice:head.messagestr];
        }
    } failoperation:^(NSError *error) {
        @strongify(self);
        [BITLoadingView hide];
        [[BITNoticeView currentNotice] showErrorNotice:error.localizedDescription];
    }];
}
/// Returns a version of `image` no wider than the text view's usable width.
/// Wide images are redrawn at the target size and round-tripped through
/// JPEG (quality 0.8) so the returned pixels match the compressed bytes.
/// NOTE(review): side effect preserved from the original — for images that
/// already fit, self.model.imageData is set to the JPEG bytes here; for
/// downscaled images it is NOT touched. Confirm that asymmetry is intended.
-(UIImage *)getCompressImageWithImage:(UIImage *)image
{
    CGFloat maxWidth = FULL_WIDTH - COMMON_EDGE_DISTANCE * 2;

    if (!(image.size.width > maxWidth))
    {
        // Fits already: cache JPEG bytes on the model and return unchanged.
        self.model.imageData = UIImageJPEGRepresentation(image, 0.8);
        return image;
    }

    // Downscale to maxWidth, keeping the aspect ratio, via a bitmap context.
    CGFloat scaledHeight = maxWidth * image.size.height / image.size.width;
    CGSize targetSize = CGSizeMake(maxWidth, scaledHeight);

    UIGraphicsBeginImageContext(targetSize);
    [image drawInRect:CGRectMake(0, 0, targetSize.width, targetSize.height)];
    UIImage *scaledImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();

    NSData *jpegData = UIImageJPEGRepresentation(scaledImage, 0.8);
    return [UIImage imageWithData:jpegData];
}