List Files
List and browse files and directories in the sandbox environment with advanced filtering, sorting, and metadata extraction capabilities.
List Files
Browse and enumerate files and directories within the sandbox environment with comprehensive filtering options, sorting capabilities, and detailed metadata extraction.
📂 Directory Navigation
File listing provides detailed information about sandbox contents with support for recursive directory traversal, pattern matching, and custom sorting options.
Overview
The List Files tool enables comprehensive directory browsing and file enumeration within the sandbox environment, supporting advanced filtering, sorting, and metadata extraction for efficient file management.
Key Features
- Recursive Listing - Browse directories and subdirectories recursively
- Pattern Filtering - Filter files by name patterns, extensions, and wildcards
- Metadata Extraction - Retrieve detailed file properties and attributes
- Sorting Options - Sort by name, size, date, type, and custom criteria
- Performance Control - Limit results and pagination for large directories
Methods
listFiles
List files and directories in the sandbox environment.
| Parameter | Type | Required | Description |
|---|---|---|---|
| path | String | Yes | Directory path to list (default: '/sandbox') |
| recursive | Boolean | No | Include subdirectories recursively (default: false) |
| includeHidden | Boolean | No | Include hidden files and directories (default: false) |
| pattern | String | No | File name pattern or glob expression |
| extensions | Array | No | Filter by file extensions (e.g., ['.pdf', '.csv']) |
| sortBy | String | No | Sort criteria: 'name', 'size', 'modified', 'type' (default: 'name') |
| sortOrder | String | No | Sort order: 'asc' or 'desc' (default: 'asc') |
| limit | Number | No | Maximum number of items to return (default: 1000) |
| offset | Number | No | Number of items to skip for pagination (default: 0) |
| includeMetadata | Boolean | No | Include detailed file metadata (default: true) |
{
"path": "/sandbox/data",
"recursive": true,
"pattern": "*.csv",
"sortBy": "modified",
"sortOrder": "desc",
"limit": 50
}
Output:
- success (Boolean) - Operation success status
- path (String) - Listed directory path
- totalItems (Number) - Total number of items found
- returnedItems (Number) - Number of items returned in response
- items (Array) - Array of file and directory items
  - name (String) - File or directory name
  - path (String) - Full path to item
  - type (String) - Item type: 'file' or 'directory'
  - size (Number) - File size in bytes (files only)
  - modified (String) - Last modification timestamp
  - created (String) - Creation timestamp
  - permissions (String) - File permissions (e.g., 'rwxr-xr-x')
  - extension (String) - File extension (files only)
  - mimeType (String) - MIME type (files only)
- hasMore (Boolean) - Whether more items are available
- nextOffset (Number) - Offset for next page of results
Filtering and Search
Pattern Matching
Directory Navigation
Recursive Directory Listing
def explore_directory_structure(root_path="/sandbox"):
    """Recursively explore a directory tree and group items by parent directory.

    Args:
        root_path: Directory to start exploring from (default "/sandbox").

    Returns:
        Dict with "directories" and "files" maps keyed by parent directory
        path, plus a "summary" of total counts and combined file size.
    """
    result = listFiles({
        "path": root_path,
        "recursive": True,
        "includeHidden": False,
        "sortBy": "type",
        "includeMetadata": True
    })
    # Bucket every returned item under its parent directory path.
    directories = {}
    files = {}
    for item in result['items']:
        segments = item['path'].split('/')
        parent_dir = '/'.join(segments[:-1])
        bucket = directories if item['type'] == 'directory' else files
        bucket.setdefault(parent_dir, []).append(item)
    return {
        "directories": directories,
        "files": files,
        "summary": {
            "total_directories": sum(len(dirs) for dirs in directories.values()),
            "total_files": sum(len(file_list) for file_list in files.values()),
            # 'size' is documented for file entries only, and only file
            # items land in `files`, so summing here is safe.
            "total_size": sum(item['size'] for file_list in files.values() for item in file_list)
        }
    }
# Usage
structure = explore_directory_structure("/sandbox/projects")
print(f"Found {structure['summary']['total_files']} files in {structure['summary']['total_directories']} directories")
Directory Tree Visualization
def create_directory_tree(path="/sandbox", max_depth=3):
    """Print a visual tree of *path*, limited to *max_depth* levels deep.

    Args:
        path: Root directory to render (default "/sandbox").
        max_depth: Deepest level relative to *path* to include (default 3).
    """
    result = listFiles({
        "path": path,
        "recursive": True,
        "sortBy": "name",
        "includeMetadata": False
    })
    tree_structure = {}
    for item in result['items']:
        # Depth = number of path separators below the listed root.
        relative_path = item['path'][len(path):].lstrip('/')
        depth = len(relative_path.split('/')) - 1 if relative_path else 0
        if depth <= max_depth:
            # Walk (creating as needed) one nested dict per path component.
            parts = relative_path.split('/') if relative_path else []
            current = tree_structure
            for part in parts:
                if part not in current:
                    current[part] = {}
                current = current[part]
    # Fix: the original helper took an `is_last` parameter it never read;
    # it has been removed from the signature and the recursive call.
    def print_tree(tree, prefix=""):
        """Print nested dict *tree* with box-drawing connectors."""
        items = list(tree.items())
        for i, (name, subtree) in enumerate(items):
            is_last_item = (i == len(items) - 1)
            current_prefix = "└── " if is_last_item else "├── "
            print(f"{prefix}{current_prefix}{name}")
            if subtree:
                # Continuation strings are 4 chars wide to line up with the
                # 4-char connectors above.
                extension = "    " if is_last_item else "│   "
                print_tree(subtree, prefix + extension)
    print(f"Directory tree for: {path}")
    print_tree(tree_structure)
# Usage
create_directory_tree("/sandbox/data", max_depth=2)
File Analysis and Statistics
Directory Statistics
def analyze_directory_contents(path="/sandbox"):
    """Analyze directory contents and generate statistics.

    Recursively lists *path* and aggregates file/directory counts, total
    size, extension frequencies, a size-bucket distribution, and the
    top-10 largest, newest, and oldest files.

    Args:
        path: Directory to analyze (default "/sandbox").

    Returns:
        Dict of aggregated statistics (see keys of `stats` below).
    """
    result = listFiles({
        "path": path,
        "recursive": True,
        "includeMetadata": True,
        "limit": 10000
    })
    MB = 1024 * 1024  # size buckets are defined in binary megabytes
    stats = {
        "file_count": 0,
        "directory_count": 0,
        "total_size": 0,
        "extensions": {},
        "size_distribution": {
            "small": 0,   # < 1MB
            "medium": 0,  # 1MB - 10MB
            "large": 0,   # 10MB - 100MB
            "huge": 0     # > 100MB
        },
        "largest_files": [],
        "newest_files": [],
        "oldest_files": []
    }
    all_files = []
    for item in result['items']:
        if item['type'] == 'file':
            stats['file_count'] += 1
            size = item['size']
            stats['total_size'] += size
            all_files.append(item)
            # Extension analysis. Fix: 'extension' may be present but None
            # (directories-turned-files, extensionless files), which would
            # crash `.lower()`; normalize to '' first.
            ext = (item.get('extension') or '').lower()
            stats['extensions'][ext] = stats['extensions'].get(ext, 0) + 1
            # Size distribution
            if size < MB:
                stats['size_distribution']['small'] += 1
            elif size < 10 * MB:
                stats['size_distribution']['medium'] += 1
            elif size < 100 * MB:
                stats['size_distribution']['large'] += 1
            else:
                stats['size_distribution']['huge'] += 1
        else:
            stats['directory_count'] += 1
    # Top-10 lists. NOTE(review): 'modified' is compared as a string —
    # assumes ISO-8601 timestamps so lexicographic order matches
    # chronological order; confirm against the listFiles output format.
    stats['largest_files'] = sorted(all_files, key=lambda x: x['size'], reverse=True)[:10]
    stats['newest_files'] = sorted(all_files, key=lambda x: x['modified'], reverse=True)[:10]
    stats['oldest_files'] = sorted(all_files, key=lambda x: x['modified'])[:10]
    return stats
# Usage and reporting
def generate_directory_report(path="/sandbox"):
    """Print a formatted directory-analysis report for *path* to stdout."""
    stats = analyze_directory_contents(path)
    print(f"\n📊 Directory Analysis Report for: {path}")
    print("=" * 50)
    print(f"📁 Total Directories: {stats['directory_count']}")
    print(f"📄 Total Files: {stats['file_count']}")
    print(f"💾 Total Size: {stats['total_size'] / (1024*1024):.2f} MB")
    print(f"\n📈 File Extensions:")
    # Most common extensions first; show at most ten.
    ranked_extensions = sorted(stats['extensions'].items(), key=lambda x: x[1], reverse=True)
    for ext, count in ranked_extensions[:10]:
        ext_display = ext if ext else '(no extension)'
        print(f" {ext_display}: {count} files")
    print(f"\n📏 Size Distribution:")
    for category, count in stats['size_distribution'].items():
        print(f" {category.title()}: {count} files")
    print(f"\n🏆 Largest Files:")
    top_five = stats['largest_files'][:5]
    for i, file_item in enumerate(top_five, 1):
        size_mb = file_item['size'] / (1024*1024)
        print(f" {i}. {file_item['name']} ({size_mb:.2f} MB)")
# Generate report
generate_directory_report("/sandbox/data")
Pagination and Performance
Large Directory Handling
⚡ Performance Considerations
When listing large directories, use pagination and filtering to avoid memory issues and improve response times.
def paginated_file_listing(path, page_size=100):
    """List a directory page by page and accumulate every item.

    Args:
        path: Directory to list (non-recursive).
        page_size: Items requested per page (default 100).

    Returns:
        List of all item dicts retrieved before exhaustion or an error.
    """
    all_results = []
    offset = 0
    has_more = True
    while has_more:
        result = listFiles({
            "path": path,
            "recursive": False,
            "limit": page_size,
            "offset": offset,
            "sortBy": "name"
        })
        if not result['success']:
            break  # stop on failure; return whatever was collected so far
        items = result['items']
        all_results.extend(items)
        has_more = result['hasMore']
        next_offset = result.get('nextOffset', offset + page_size)
        print(f"Loaded {len(items)} items (Total: {len(all_results)})")
        # Fix: guard against a server that reports hasMore=True while
        # returning an empty page or a non-advancing offset — the original
        # loop would spin forever in that case.
        if has_more and (not items or next_offset <= offset):
            break
        offset = next_offset
    return all_results
# Usage for large directories
large_directory_files = paginated_file_listing("/sandbox/massive_dataset", page_size=50)
Selective Information Loading
def quick_file_overview(path, summary_only=True):
    """Get a quick overview of *path* without loading heavy metadata.

    When *summary_only* is True, returns only counts; otherwise returns
    the full listing result including metadata.
    """
    result = listFiles({
        "path": path,
        "recursive": False,
        "includeMetadata": not summary_only,
        "limit": 1000,
        "sortBy": "type"
    })
    if not summary_only:
        return result
    # Quick summary: count files and directories in a single pass.
    file_count = 0
    directory_count = 0
    for entry in result['items']:
        if entry['type'] == 'file':
            file_count += 1
        elif entry['type'] == 'directory':
            directory_count += 1
    return {
        "path": path,
        "file_count": file_count,
        "directory_count": directory_count,
        "total_items": len(result['items'])
    }
# Quick overview
overview = quick_file_overview("/sandbox/uploads")
print(f"Found {overview['file_count']} files and {overview['directory_count']} directories")
Error Handling
Common Listing Issues
| Error Type | Cause | Resolution |
|---|---|---|
| Permission Denied | Insufficient read permissions | Check directory permissions |
| Path Not Found | Directory doesn't exist | Verify path and create if needed |
| Too Many Results | Directory contains excessive files | Use pagination and filtering |
| Access Timeout | Large directory scan timeout | Use smaller limits and pagination |
| Invalid Pattern | Malformed glob pattern | Check pattern syntax |
Robust Listing Function
def safe_file_listing(path, **options):
    """Robust file listing with error handling.

    Attempts a listing with *options*; if that raises, retries once with
    a minimal non-recursive request before giving up.

    Args:
        path: Directory to list.
        **options: Extra listFiles parameters (e.g. recursive=True).

    Returns:
        The listing result dict, or None on failure.
    """
    try:
        result = listFiles({
            "path": path,
            **options
        })
        if not result['success']:
            print(f"❌ Failed to list files in {path}")
            return None
        if result['totalItems'] == 0:
            print(f"📂 Directory {path} is empty")
        else:
            print(f"✅ Found {result['totalItems']} items in {path}")
        return result
    except Exception as e:
        print(f"💥 Error listing {path}: {str(e)}")
        # Try simplified listing
        try:
            fallback_result = listFiles({
                "path": path,
                "recursive": False,
                "includeMetadata": False,
                "limit": 100
            })
            print(f"🔄 Fallback listing succeeded: {len(fallback_result['items'])} items")
            return fallback_result
        # Fix: was a bare `except:`, which also swallows SystemExit and
        # KeyboardInterrupt; narrow to Exception.
        except Exception:
            print(f"💥 Fallback listing also failed for {path}")
            return None
# Usage with error handling
safe_result = safe_file_listing("/sandbox/problematic_directory", recursive=True)
Integration Patterns
With File Search Tools
# Combine listing with searching
def enhanced_file_search(base_path, search_criteria):
"""Enhanced file search combining listing and filtering."""
# First get all files
all_files = listFiles({
"path": base_path,
"recursive": True,
"includeMetadata": True
})
# Apply custom search criteria
matching_files = []
for item in all_files['items']:
if item['type'] == 'file':
# Size criteria
if 'min_size' in search_criteria:
if item['size'] < search_criteria['min_size']:
continue
# Content type criteria
if 'mime_types' in search_criteria:
if item['mimeType'] not in search_criteria['mime_types']:
continue
# Name criteria
if 'name_contains' in search_criteria:
if search_criteria['name_contains'] not in item['name'].lower():
continue
matching_files.append(item)
return matching_files
# Usage
search_results = enhanced_file_search("/sandbox/data", {
'min_size': 1024 * 1024, # > 1MB
'mime_types': ['application/pdf', 'text/csv'],
'name_contains': 'report'
})
Related Tools
File Search
Advanced file searching with content and metadata filtering
File Metadata
Extract detailed metadata and properties from files
Create Directory
Create and manage directory structures
Next Steps: Use with File Search for advanced filtering, or File Metadata for detailed file information.