当前位置: 首页 > news >正文

折腾笔记[45]-导入及导出ollama模型

摘要

使用CLI工具从文件夹导入模型,以及导出本机Ollama模型到文件夹。导入时可以自动处理重名问题。

AGENTS.md声明

本文用于投喂龙虾(openclaw).

实操

1. 使用shell方式导出

[https://github.com/AaronFeng753/Ollama-Model-Dumper]

## Export (bash/zsh, macOS/Linux)
# Dumps a model's Modelfile plus its GGUF weight blob into $TARGET.
# The python3 one-liner pulls the weight layer's digest out of the manifest.
MODEL="modelscope.cn/unsloth/Qwen3-Coder-30B-A3B-Instruct-1M-GGUF:Qwen3-Coder-30B-A3B-Instruct-1M-UD-IQ1_S.gguf" && \
TARGET="$HOME/Downloads/Qwen3-Coder-30B-A3B-Instruct-1M-GGUF" && \
mkdir -p "$TARGET" && \
ollama show --modelfile "$MODEL" > "$TARGET/Modelfile" && \
MANIFEST="$HOME/.ollama/models/manifests/modelscope.cn/unsloth/Qwen3-Coder-30B-A3B-Instruct-1M-GGUF/Qwen3-Coder-30B-A3B-Instruct-1M-UD-IQ1_S.gguf" && \
DIGEST=$(python3 -c "import json,sys; d=json.load(open('$MANIFEST')); print([l for l in d['layers'] if l['mediaType']=='application/vnd.ollama.image.model'][0]['digest'].replace('sha256:','sha256-'))") && \
cp "$HOME/.ollama/models/blobs/$DIGEST" "$TARGET/model.gguf" && \
ls -lh "$TARGET"

# Second variant: modelscope.cn/unsloth/Qwen3-Coder-30B-A3B-Instruct-1M-GGUF:Qwen3-Coder-30B-A3B-Instruct-1M-UD-Q2_K_XL.gguf
MODEL="modelscope.cn/unsloth/Qwen3-Coder-30B-A3B-Instruct-1M-GGUF:Qwen3-Coder-30B-A3B-Instruct-1M-UD-Q2_K_XL.gguf" && \
TARGET="$HOME/Downloads/Qwen3-Coder-30B-A3B-Instruct-1M-GGUF_UD-Q2_K_XL" && \
mkdir -p "$TARGET" && \
ollama show --modelfile "$MODEL" > "$TARGET/Modelfile" && \
MANIFEST="$HOME/.ollama/models/manifests/modelscope.cn/unsloth/Qwen3-Coder-30B-A3B-Instruct-1M-GGUF/Qwen3-Coder-30B-A3B-Instruct-1M-UD-Q2_K_XL.gguf" && \
DIGEST=$(python3 -c "import json,sys; d=json.load(open('$MANIFEST')); print([l for l in d['layers'] if l['mediaType']=='application/vnd.ollama.image.model'][0]['digest'].replace('sha256:','sha256-'))") && \
cp "$HOME/.ollama/models/blobs/$DIGEST" "$TARGET/model.gguf" && \
ls -lh "$TARGET"

## Import (run in PowerShell on Windows)
# Rewrites the FROM line to point at the local model.gguf, then recreates the model.
cd "$env:USERPROFILE\Downloads\Qwen3-Coder-30B-A3B-Instruct-1M-GGUF"; `
(Get-Content Modelfile) -replace '^FROM\s+.*', 'FROM ./model.gguf' | Set-Content Modelfile; `
ollama create qwen3-coder-30b-local -f Modelfile

2. go的cli工具

// 文件: ollama_export_and_import.go
// 功能: 导出/导入ollama模型package mainimport ("bufio""encoding/json""fmt""os""os/exec""path/filepath""regexp""runtime""strings""time"
)// ModelInfo stores model information
type ModelInfo struct {Name     stringID       stringSize     stringModified string
}func main() {reader := bufio.NewReader(os.Stdin)fmt.Println("╔════════════════════════════════════════╗")fmt.Println("║     Ollama Model Export/Import Tool    ║")fmt.Println("╚════════════════════════════════════════╝")fmt.Println()fmt.Println("Select operation:")fmt.Println("  [1] Export Model")fmt.Println("  [2] Import Model")fmt.Println()fmt.Print("Enter option (1/2): ")choice, _ := reader.ReadString('\n')choice = strings.TrimSpace(choice)switch choice {case "1":exportModel(reader)case "2":importModel(reader)default:fmt.Println("Invalid option, exiting")os.Exit(1)}
}func exportModel(reader *bufio.Reader) {fmt.Println("\n--- Export Mode ---\n")models := getModelList()if len(models) == 0 {fmt.Println("No Ollama models found")return}fmt.Println("Installed models:")fmt.Println(strings.Repeat("-", 80))fmt.Printf("%-5s %-40s %-20s %s\n", "No.", "Name", "Size", "Modified")fmt.Println(strings.Repeat("-", 80))for i, model := range models {fmt.Printf("%-5d %-40s %-20s %s\n", i+1, model.Name, model.Size, model.Modified)}fmt.Println(strings.Repeat("-", 80))fmt.Println()fmt.Print("Enter model number to export: ")input, _ := reader.ReadString('\n')input = strings.TrimSpace(input)var selectedIndex intif _, err := fmt.Sscanf(input, "%d", &selectedIndex); err != nil || selectedIndex < 1 || selectedIndex > len(models) {fmt.Println("Invalid selection")return}selectedModel := models[selectedIndex-1]fmt.Printf("\nSelected model: %s\n", selectedModel.Name)defaultDir := filepath.Join(getHomeDir(), "Downloads", strings.ReplaceAll(selectedModel.Name, ":", "_"))fmt.Printf("Enter export directory (default: %s): ", defaultDir)targetDir, _ := reader.ReadString('\n')targetDir = strings.TrimSpace(targetDir)if targetDir == "" {targetDir = defaultDir}fmt.Printf("\nWill export model to: %s\n", targetDir)fmt.Print("Confirm export? (y/n): ")confirm, _ := reader.ReadString('\n')if strings.ToLower(strings.TrimSpace(confirm)) != "y" {fmt.Println("Operation cancelled")return}if err := performExport(selectedModel.Name, targetDir); err != nil {fmt.Printf("Export failed: %v\n", err)os.Exit(1)}
}func getModelList() []ModelInfo {cmd := exec.Command("ollama", "ls")output, err := cmd.Output()if err != nil {return nil}var models []ModelInfolines := strings.Split(string(output), "\n")for i, line := range lines {if i == 0 || strings.TrimSpace(line) == "" {continue}fields := strings.Fields(line)if len(fields) >= 4 {model := ModelInfo{Name:     fields[0],ID:       fields[1],Size:     fields[2],Modified: strings.Join(fields[3:], " "),}models = append(models, model)}}return models
}func performExport(modelName, targetDir string) error {fmt.Println("\nStarting export...")if err := os.MkdirAll(targetDir, 0755); err != nil {return fmt.Errorf("failed to create directory: %v", err)}fmt.Println("[1/3] Exporting Modelfile...")modelfilePath := filepath.Join(targetDir, "Modelfile")cmd := exec.Command("ollama", "show", "--modelfile", modelName)output, err := cmd.Output()if err != nil {return fmt.Errorf("failed to get Modelfile: %v", err)}if err := os.WriteFile(modelfilePath, output, 0644); err != nil {return fmt.Errorf("failed to save Modelfile: %v", err)}fmt.Println("[2/3] Parsing model metadata...")digest, err := getModelDigest(modelName)if err != nil {return fmt.Errorf("failed to get model digest: %v", err)}fmt.Println("[3/3] Copying model file...")ollamaHome := getOllamaHome()sourcePath := filepath.Join(ollamaHome, "models", "blobs", digest)targetPath := filepath.Join(targetDir, "model.gguf")if err := copyFile(sourcePath, targetPath); err != nil {return fmt.Errorf("failed to copy model file: %v", err)}fmt.Println("\nExport successful!")fmt.Printf("Export directory: %s\n", targetDir)fmt.Println("\nFiles:")files, _ := os.ReadDir(targetDir)for _, file := range files {info, _ := file.Info()size := formatBytes(info.Size())fmt.Printf("  - %s (%s)\n", file.Name(), size)}return nil
}func getModelDigest(modelName string) (string, error) {parts := strings.Split(modelName, ":")tag := "latest"namePart := modelNameif len(parts) == 2 {namePart = parts[0]tag = parts[1]}ollamaHome := getOllamaHome()manifestPath := filepath.Join(ollamaHome, "models", "manifests")var manifestFile stringif strings.Contains(namePart, "/") {manifestFile = filepath.Join(manifestPath, namePart, tag)} else {manifestFile = filepath.Join(manifestPath, "registry.ollama.ai", "library", namePart, tag)}data, err := os.ReadFile(manifestFile)if err != nil {altPath := filepath.Join(manifestPath, "registry.ollama.ai", "library", strings.ToLower(namePart), tag)data, err = os.ReadFile(altPath)if err != nil {return "", fmt.Errorf("failed to read manifest: %v", err)}manifestFile = altPath}var manifest struct {Layers []struct {MediaType string `json:"mediaType"`Digest    string `json:"digest"`} `json:"layers"`}if err := json.Unmarshal(data, &manifest); err != nil {return "", fmt.Errorf("failed to parse manifest: %v", err)}for _, layer := range manifest.Layers {if layer.MediaType == "application/vnd.ollama.image.model" {digest := strings.Replace(layer.Digest, ":", "-", 1)return digest, nil}}return "", fmt.Errorf("model layer not found")
}func importModel(reader *bufio.Reader) {fmt.Println("\n--- Import Mode ---\n")fmt.Print("Enter model folder path (contains Modelfile and model.gguf): ")modelDir, _ := reader.ReadString('\n')modelDir = strings.TrimSpace(modelDir)modelDir = strings.Trim(modelDir, `"'`)if _, err := os.Stat(modelDir); os.IsNotExist(err) {fmt.Printf("Directory does not exist: %s\n", modelDir)return}modelfilePath := filepath.Join(modelDir, "Modelfile")modelFilePath := filepath.Join(modelDir, "model.gguf")if _, err := os.Stat(modelfilePath); os.IsNotExist(err) {fmt.Println("Error: Modelfile not found")return}if _, err := os.Stat(modelFilePath); os.IsNotExist(err) {fmt.Println("Error: model.gguf not found")return}fmt.Println("\nReading Modelfile...")content, err := os.ReadFile(modelfilePath)if err != nil {fmt.Printf("Failed to read Modelfile: %v\n", err)return}contentStr := string(content)re := regexp.MustCompile(`(?m)^FROM\s+.*$`)modifiedContent := re.ReplaceAllString(contentStr, "FROM ./model.gguf")tempModelfile := filepath.Join(modelDir, "Modelfile.import")if err := os.WriteFile(tempModelfile, []byte(modifiedContent), 0644); err != nil {fmt.Printf("Failed to write temp Modelfile: %v\n", err)return}defer os.Remove(tempModelfile)suggestedName := extractModelName(string(content), modelDir)fmt.Printf("\nSuggested model name: %s\n", suggestedName)fmt.Print("Enter new model name (press Enter to use suggested): ")newName, _ := reader.ReadString('\n')newName = strings.TrimSpace(newName)if newName == "" {newName = suggestedName}existingModels := getExistingModelNames()finalName := resolveModelName(newName, existingModels)if finalName != newName {fmt.Printf("\nModel name '%s' already exists, using: %s\n", newName, finalName)}fmt.Printf("\nWill import model '%s' from: %s\n", finalName, modelDir)fmt.Print("Confirm import? 
(y/n): ")confirm, _ := reader.ReadString('\n')if strings.ToLower(strings.TrimSpace(confirm)) != "y" {fmt.Println("Operation cancelled")return}fmt.Println("\nStarting import...")cmd := exec.Command("ollama", "create", finalName, "-f", tempModelfile)cmd.Dir = modelDircmd.Stdout = os.Stdoutcmd.Stderr = os.Stderrif err := cmd.Run(); err != nil {fmt.Printf("\nImport failed: %v\n", err)os.Exit(1)}fmt.Printf("\nImport successful! Model name: %s\n", finalName)fmt.Println("Usage: ollama run", finalName)
}func extractModelName(modelfileContent, modelDir string) string {lines := strings.Split(modelfileContent, "\n")for _, line := range lines {line = strings.TrimSpace(line)if strings.HasPrefix(line, "FROM ") {fromPath := strings.TrimSpace(strings.TrimPrefix(line, "FROM"))if strings.HasSuffix(fromPath, ".gguf") {base := filepath.Base(fromPath)base = strings.TrimSuffix(base, ".gguf")base = sanitizeModelName(base)if base != "" {return base}}}}base := filepath.Base(modelDir)base = sanitizeModelName(base)if base == "" {base = "imported-model"}return base
}func sanitizeModelName(name string) string {name = strings.ToLower(name)var result strings.Builderfor _, r := range name {switch {case (r >= 'a' && r <= 'z') || (r >= '0' && r <= '9') || r == '_' || r == '.' || r == '-':result.WriteRune(r)case r == ' ' || r == '\\' || r == '/' || r == ':':result.WriteRune('-')}}cleaned := result.String()for strings.Contains(cleaned, "--") {cleaned = strings.ReplaceAll(cleaned, "--", "-")}cleaned = strings.Trim(cleaned, "-")return cleaned
}func getExistingModelNames() []string {cmd := exec.Command("ollama", "list")output, err := cmd.Output()if err != nil {return []string{}}var names []stringlines := strings.Split(string(output), "\n")for i, line := range lines {if i == 0 || strings.TrimSpace(line) == "" {continue}fields := strings.Fields(line)if len(fields) > 0 {names = append(names, fields[0])}}return names
}func resolveModelName(name string, existing []string) string {if !contains(existing, name) {return name}baseName := nametag := "latest"if idx := strings.LastIndex(name, ":"); idx != -1 {baseName = name[:idx]tag = name[idx+1:]}timestamp := time.Now().Format("20060102-1504")newName := fmt.Sprintf("%s-%s:%s", baseName, timestamp, tag)if !contains(existing, newName) {return newName}timestamp = time.Now().Format("20060102-150405")newName = fmt.Sprintf("%s-%s:%s", baseName, timestamp, tag)if !contains(existing, newName) {return newName}for i := 1; i < 1000; i++ {newName = fmt.Sprintf("%s-%s-%d:%s", baseName, timestamp, i, tag)if !contains(existing, newName) {return newName}}return newName
}func contains(slice []string, item string) bool {for _, s := range slice {if s == item {return true}}return false
}func getHomeDir() string {home, err := os.UserHomeDir()if err != nil {return "."}return home
}func getOllamaHome() string {if ollamaHome := os.Getenv("OLLAMA_HOME"); ollamaHome != "" {return ollamaHome}home := getHomeDir()if runtime.GOOS == "windows" {return filepath.Join(home, ".ollama")}return filepath.Join(home, ".ollama")
}func copyFile(src, dst string) error {input, err := os.ReadFile(src)if err != nil {return err}return os.WriteFile(dst, input, 0644)
}func formatBytes(bytes int64) string {const (KB = 1024MB = 1024 * KBGB = 1024 * MB)switch {case bytes >= GB:return fmt.Sprintf("%.2f GB", float64(bytes)/GB)case bytes >= MB:return fmt.Sprintf("%.2f MB", float64(bytes)/MB)case bytes >= KB:return fmt.Sprintf("%.2f KB", float64(bytes)/KB)default:return fmt.Sprintf("%d B", bytes)}
}

效果:

(base) workspace@macbook2022 exp307-mqtt-ollama % ./ollama_export_and_import_2603081900 
╔════════════════════════════════════════╗
║     Ollama Model Export/Import Tool    ║
╚════════════════════════════════════════╝Select operation:[1] Export Model[2] Import ModelEnter option (1/2): 1--- Export Mode ---Installed models:
--------------------------------------------------------------------------------
No.   Name                                     Size                 Modified
--------------------------------------------------------------------------------
1     qwen3:4b                                 11                   GB 5 weeks ago
2     modelscope.cn/unsloth/Qwen3-Coder-30B-A3B-Instruct-1M-GGUF:Qwen3-Coder-30B-A3B-Instruct-1M-UD-Q2_K_XL.gguf 11                   GB 5 weeks ago
3     modelscope.cn/Qwen/Qwen3-VL-4B-Instruct-GGUF:latest 3.3                  GB 5 weeks ago
4     qwen3:0.6b                               522                  MB 9 months ago
5     qllama/bge-reranker-large:latest         604                  MB 11 months ago
6     bge-m3:latest                            1.2                  GB 11 months ago
--------------------------------------------------------------------------------Enter model number to export: 4Selected model: qwen3:0.6b
Enter export directory (default: /Users/workspace/Downloads/qwen3_0.6b): Will export model to: /Users/workspace/Downloads/qwen3_0.6b
Confirm export? (y/n): yStarting export...
[1/3] Exporting Modelfile...
[2/3] Parsing model metadata...
[3/3] Copying model file...Export successful!
Export directory: /Users/workspace/Downloads/qwen3_0.6bFiles:- Modelfile (12.92 KB)- model.gguf (498.43 MB)
(base) workspace@macbook2022 exp307-mqtt-ollama % ./ollama_export_and_import_2603081900
╔════════════════════════════════════════╗
║     Ollama Model Export/Import Tool    ║
╚════════════════════════════════════════╝Select operation:[1] Export Model[2] Import ModelEnter option (1/2): 2--- Import Mode ---Enter model folder path (contains Modelfile and model.gguf): /Users/workspace/Downloads/qwen3_0.6bReading Modelfile...Suggested model name: qwen3_0.6b
Enter new model name (press Enter to use suggested): Will import model 'qwen3_0.6b' from: /Users/workspace/Downloads/qwen3_0.6b
Confirm import? (y/n): yStarting import...
gathering model components 
copying file sha256:7f4030143c1c477224c5434f8272c662a8b042079a0a584f0a27a1684fe2e1fa 100% 
parsing GGUF 
using existing layer sha256:7f4030143c1c477224c5434f8272c662a8b042079a0a584f0a27a1684fe2e1fa 
using existing layer sha256:eb4402837c7829a690fa845de4d7f3fd842c2adee476d5341da8a46ea9255175 
using existing layer sha256:d18a5cc71b84bc4af394a31116bd3932b42241de70c77d2b76d69a314ec8aa12 
using existing layer sha256:cff3f395ef3756ab63e58b0ad1b32bb6f802905cae1472e6a12034e4246fbbdb 
writing manifest 
success Import successful! Model name: qwen3_0.6b
Usage: ollama run qwen3_0.6b
(base) workspace@macbook2022 exp307-mqtt-ollama % ollama ls
NAME                                                                                                          ID              SIZE      MODIFIED      
qwen3_0.6b:latest                                                                                             d8203e03137e    522 MB    3 seconds ago    
qwen3:4b                                                                                                      e363c0c8e5ca    11 GB     5 weeks ago      
modelscope.cn/unsloth/Qwen3-Coder-30B-A3B-Instruct-1M-GGUF:Qwen3-Coder-30B-A3B-Instruct-1M-UD-Q2_K_XL.gguf    e363c0c8e5ca    11 GB     5 weeks ago      
modelscope.cn/Qwen/Qwen3-VL-4B-Instruct-GGUF:latest                                                           7b8954ee27f7    3.3 GB    5 weeks ago      
qwen3:0.6b                                                                                                    3bae9c93586b    522 MB    9 months ago     
qllama/bge-reranker-large:latest                                                                              1327c18a2be0    604 MB    11 months ago    
bge-m3:latest                                                                                                 790764642607    1.2 GB    11 months ago    
http://www.jsqmd.com/news/450921/

相关文章:

  • cv_resnet101_face-detection模型与Java八股文精粹:深入JVM内存管理与多线程调用
  • 智能工作流调度:重新定义多任务处理的效率革命
  • BAAI/bge-m3企业应用:文档去重与知识库语义验证方案
  • 效率工具重塑设计协作:如何通过HTML转Figma实现工作流无缝迁移
  • CosyVoice入门必看:C语言基础概念语音教学课件生成
  • 高效管理《方舟:生存进化》服务器的开源自动化运维工具全解析
  • 游戏存档丢失怎么办?3DS玩家必备的JKSM工具拯救指南
  • 盒马鲜生礼品卡怎么换成现金?专业平台解锁闲置价值 - 京顺回收
  • Redis管理工具效率提升指南:RedisDesktopManager全面解析
  • CosyVoice语音克隆案例展示:克隆老板声音做会议通知,效果逼真
  • Janus-Pro-7B开发利器:Typora搭配模型进行Markdown文档智能辅助写作
  • Youtu-Parsing构建智能Web应用:前端交互与实时解析展示
  • ChatGLM3-6B-128K与SpringBoot集成:企业级AI服务开发
  • 智能宠物喂食毕业设计:从零搭建嵌入式控制与云端联动系统
  • Ubuntu系统优化:图片旋转判断服务的GPU加速配置
  • Qwen3-VL-8B与Git工作流结合:自动生成代码变更的图文更新日志
  • 文件安全守护者:HashCheck哈希验证工具全解析
  • YOLO12从部署到应用:完整实战教程,覆盖监控、相册、质检多场景
  • 利用CasRel模型进行软件测试报告自动化分析:提取缺陷与关联模块
  • 开源项目Masa Mods汉化包完整指南:从部署到深度定制
  • 乙巳马年皇城大门春联生成终端W模型微调教程:使用自有数据集定制专属风格
  • 文件校验工具HashCheck:保护Windows文件安全的必备利器
  • TensorFlow-v2.15实战成果:房价预测模型效果与代码分享
  • GLM-4-9B-Chat-1M效果展示:vLLM部署实测,Chainlit前端对话体验惊艳
  • Flutter 三方库 ollama 的鸿蒙化适配指南 - 掌控边缘 AI 资产、本地大模型治理实战、鸿蒙级智能专家
  • DAMOYOLO-S多模型集成实战:融合不同骨干网络提升检测鲁棒性
  • KKS-HF Patch技术解析:从问题诊断到架构优化的完整指南
  • Qwen-Image-2512-Pixel-Art-LoRA部署教程:NVIDIA驱动版本兼容性验证(535+)
  • Lychee Rerank MM惊艳效果展示:图文-图文重排序在跨模态检索中的SOTA匹配案例
  • 如何用一款工具解决方舟服务器90%的管理难题:从新手到专家的全流程指南