@@ -23,8 +23,9 @@ Read the text 'kids feeding and playing with the horse' to generate a text embed
 - Encode video (default):
 ```python
 import towhee
-towhee.glob('./archery.mp4') \
-      .video_decode.ffmpeg() \
+towhee.dc(['./demo_video.mp4']) \
+      .video_decode.ffmpeg(sample_type='uniform_temporal_subsample', args={'num_samples': 4}) \
+      .runas_op(func=lambda x: [y for y in x]) \
       .video_text_embedding.frozen_in_time(model_name='frozen_in_time_base_16_244', modality='video', device='cpu') \
       .show()
 
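For orientation, the text example referenced in this hunk's header sits above the changed lines and is not shown here; a minimal sketch of it, assuming it mirrors the video call with `modality='text'`:

```python
import towhee

# Hedged sketch: the text-modality call referenced by the hunk header.
# The dc() input line appears verbatim in the second hunk's header; the
# modality='text' switch is an assumption, not part of this diff.
towhee.dc(['kids feeding and playing with the horse']) \
      .video_text_embedding.frozen_in_time(model_name='frozen_in_time_base_16_244', modality='text', device='cpu') \
      .show()
```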
@@ -46,8 +47,9 @@ towhee.dc(['kids feeding and playing with the horse']) \
 ```python
 import towhee
 
-towhee.glob['path']('./archery.mp4') \
-      .video_decode.ffmpeg['path', 'frames']() \
+towhee.dc['path'](['./demo_video.mp4']) \
+      .video_decode.ffmpeg['path', 'frames'](sample_type='uniform_temporal_subsample', args={'num_samples': 4}) \
+      .runas_op['frames', 'frames'](func=lambda x: [y for y in x]) \
       .video_text_embedding.frozen_in_time['frames', 'vec'](model_name='frozen_in_time_base_16_244', modality='video', device='cpu') \
       .select['path', 'vec']() \
       .show(formatter={'path': 'video_path'})
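In both hunks the pipeline now starts from `towhee.dc` instead of `towhee.glob`, decodes with `sample_type='uniform_temporal_subsample'` to keep `num_samples=4` frames spread across the clip, and applies `runas_op` with `lambda x: [y for y in x]` to turn the decoded frames into a plain list before embedding. A text-modality counterpart in the same named-column style might look like the sketch below; the column names `'text'`/`'vec'` and `modality='text'` are assumptions, not lines from this diff.

```python
import towhee

# Hedged sketch of a named-column text-embedding call in the same style as
# the second hunk; column names and modality='text' are illustrative only.
towhee.dc['text'](['kids feeding and playing with the horse']) \
      .video_text_embedding.frozen_in_time['text', 'vec'](model_name='frozen_in_time_base_16_244', modality='text', device='cpu') \
      .select['text', 'vec']() \
      .show()
```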