Set 1 | Set 2 | Set 3 | Least Character Word |
---|---|---|---|
Apex | Aurora | Atom | Apex |
Blaze | Blitz | Bolt | Bolt |
Cipher | Cascade | Cobalt | Cipher |
Dynamo | Drift | Drift | Drift |
Echo | Eclipse | Ember | Echo |
Flux | Flare | Frost | Flux |
Gravitas | Glide | Grit | Grit |
Helix | Horizon | Haven | Helix |
https://huggingface.co/openbmb/MiniCPM-V-2_6
import torch
from PIL import Image
from transformers import AutoModelForCausalLM, AutoTokenizer
import requests
https://github.com/state-spaces/mamba
from transformers import MambaConfig, MambaForCausalLM, AutoTokenizer, AutoModelForCausalLM
import torch
from functools import partial
from collections import OrderedDict, defaultdict
import os
$ python bench_linear.py --bs 1
BS: 1, Latency: 0.389 ms, IC: 4096, OC: 11008, Samples: 100, Warmup: 10
$ python bench_linear.py --bs 128
BS: 128, Latency: 3.640 ms, IC: 4096, OC: 11008, Samples: 100, Warmup: 10
$ python bench_linear.py --bs 1024
BS: 1024, Latency: 41.244 ms, IC: 4096, OC: 11008, Samples: 100, Warmup: 10
import intel_extension_for_pytorch # required for XPU
import torch
from bigdl.llm.transformers import AutoModelForCausalLM
from transformers import AutoTokenizer, pipeline
# model_id = "facebook/opt-1.3b"
# model_id = "meta-llama/Llama-2-7b"
# Hugging Face repo id of the model to load (alternatives above kept for quick swapping).
model_id = "meta-llama/Llama-2-7b-chat-hf"
# Prompt text; presumably consumed by a generation call later in the script — not shown here.
prompt = "I love the Avengers,"
import warnings
from transformers import AutoTokenizer
class PromptCreator:
def __init__(self, model_id):
self.tokenizer = AutoTokenizer.from_pretrained(model_id)
self.offset = len(self.tokenizer(self.tokenizer.special_tokens_map['bos_token'])['input_ids'])
self.samples = [
{
watcher = currentUser() AND resolution = Unresolved ORDER BY priority DESC, updated DESC
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
import os
from huggingface_hub import snapshot_download

# Repository to mirror locally; replace with a real model repo id,
# e.g. "meta-llama/Llama-2-7b-chat-hf".
REPO_ID = "repo_id"
LOCAL_ROOT = "/hf-model"
# Store each repo under a directory named after the repo id's basename.
LOCAL_DIR = os.path.join(LOCAL_ROOT, os.path.basename(REPO_ID))

# Download actual files (no symlinks into the HF cache) so that LOCAL_DIR
# is self-contained and relocatable.
snapshot_download(repo_id=REPO_ID, local_dir=LOCAL_DIR, local_dir_use_symlinks=False)
NewerOlder