Skip to content

Instantly share code, notes, and snippets.

View Leo-Lee15's full-sized avatar
😏
Busy!

Leo Lee Leo-Lee15

😏
Busy!
  • China
View GitHub Profile
import time
from vllm import LLM, SamplingParams
from vllm.inputs import PromptType
from vllm.outputs import PoolingRequestOutput, RequestOutput
from typing import Union, cast, Sequence
from multiprocessing import Queue, Event
import threading
class MyLLM(LLM):
def keep_running(
@Leo-Lee15
Leo-Lee15 / pad_packed_demo.py
Created July 24, 2025 03:01 — forked from HarshTrivedi/pad_packed_demo.py
Minimal tutorial on packing (pack_padded_sequence) and unpacking (pad_packed_sequence) sequences in pytorch.
import torch
from torch import LongTensor
from torch.nn import Embedding, LSTM
from torch.autograd import Variable
from torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence
## We want to run LSTM on a batch of 3 character sequences ['long_str', 'tiny', 'medium']
#
# Step 1: Construct Vocabulary
# Step 2: Load indexed data (list of instances, where each instance is list of character indices)
@Leo-Lee15
Leo-Lee15 / grpo_qwen-0-5b_single_t4.ipynb
Created April 7, 2025 03:29 — forked from qunash/grpo_qwen-0-5b_single_t4.ipynb
grpo_qwen-0-5b_single_t4.ipynb
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
@Leo-Lee15
Leo-Lee15 / mistral-convert.py
Created March 18, 2025 07:54 — forked from fakerybakery/mistral-convert.py
Convert the text portion of Mistral 3.1 -> HF format (IMPORTANT: Does not convert vision layers yet! The resulting model will be a text-only LLM.)
# Copyright 2023 Mistral AI and The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
@Leo-Lee15
Leo-Lee15 / generate_labels_with_deepseek.py
Created February 7, 2025 15:56 — forked from sayakpaul/generate_labels_with_deepseek.py
Generate labels with DeepSeek and `transformers`.
"""
Implementation of the label generation part in https://danielvanstrien.xyz/posts/2025/deepseek/distil-deepseek-modernbert.html
using `transformers` and DeepSeek.
"""
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch
import re
import contextlib
import math
@Leo-Lee15
Leo-Lee15 / grpo_demo.py
Created January 27, 2025 09:02 — forked from willccbb/grpo_demo.py
GRPO Llama-1B
# train_grpo.py
import re
from datasets import load_dataset, Dataset
from transformers import AutoTokenizer
from peft import LoraConfig
from trl import GRPOConfig, GRPOTrainer
# Load and prep dataset
SYSTEM_PROMPT = """
@Leo-Lee15
Leo-Lee15 / lesson1-rf.ipynb
Created July 31, 2024 03:19 — forked from kumarsuraj9450/lesson1-rf.ipynb
courses/ml1/BHU ml competition/lesson1-rf.ipynb
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
import tiktoken
import langdetect
T = tiktoken.get_encoding("o200k_base")
length_dict = {}
for i in range(T.n_vocab):
try:
length_dict[i] = len(T.decode([i]))
except:
from torch.utils.data import DataLoader
import math
from sentence_transformers import models, losses
from sentence_transformers import SentencesDataset, LoggingHandler, SentenceTransformer, util, InputExample
from sentence_transformers.evaluation import EmbeddingSimilarityEvaluator, SimilarityFunction
import logging
from datetime import datetime
import sys
import os
import gzip
@Leo-Lee15
Leo-Lee15 / example.R
Created July 4, 2020 00:55 — forked from timelyportfolio/example.R
cell in rhansontable trigger modal with additional information
library(shiny)
library(htmltools)
library(rhandsontable)
library(dplyr)
rht <- rhandsontable(
head(mtcars) %>%
mutate(name = rownames(.)) %>%
select(name, everything()),
rowHeaders = NULL