Reference Sheet
Python
Python Environments, Packages, Modules, Cache
# Conda environment management
conda create -n transcriptions python=3.10.20
conda activate transcriptions
conda deactivate
conda remove --name transcriptions --all # --all removes all packages inside that environment
# venv virtual environment management (windows)
python -m venv venv # create
venv\Scripts\activate # activate
deactivate # deactivate
rmdir /s /q <venv_folder_name> # delete
# venv virtual environment management (posix)
sudo apt install python3-venv #! extra install may be required for venv on Linux/MacOS
python3 -m venv venv # create
source venv/bin/activate # activate
deactivate # deactivate (plain command, no `source` needed)
rm -rf venv/ # delete
# PIP: Install regular and specific package versions with PIP
pip install numpy openai websockets
pip install torch==2.8.0 torchvision==0.23.0 torchaudio==2.8.0 --index-url https://download.pytorch.org/whl/cu128
pip install -r requirements.txt
# MISC
python -m module # runs a module as a script
python -m pip install numpy # -m to guarantee running pip tied to exact python interpreter
python -m pip install --upgrade pip # upgrade pip itself
pip list --outdated --format=columns # list upgradable packages
pip install --upgrade package_name # upgrade specific package
pip cache dir # where cache is
pip cache info # size, file count, location
pip cache list # list names of cached packages
pip cache remove package_name # remove specific package from cache
pip cache purge # clear entire cache
Python Imports: modules, absolute, relative
# Assumed structure
project/
├── main.py
├── utils/
│ ├── __init__.py
│ ├── math_utils.py
│ └── io_utils.py
└── models/
├── __init__.py
└── user.py
# import entire module; access its objects via the full dotted path
import utils.math_utils
utils.math_utils.add(1, 2)
# import specific objects (func/class)
from utils.math_utils import add, subtract
add(1, 2)
# import and rename
from utils.math_utils import add as add_numbers
add_numbers(1, 2)
# import everything
from utils.math_utils import *
# absolute imports
from utils.math_utils import add
from models.user import User
# relative imports inside packages (!not in top-level script)
from .math_utils import add # from same directory
from ..utils.math_utils import add # from parent directory
from ..utils import io_utils # from sibling via parent
from ...core.config import settings # from two levels up
python -m project.main #! relative imports require running as module
Python paths
# easy mode (any OS)
from pathlib import Path
p = Path("dir") / "subdir" / "file.txt" # safely join path
p = Path(r"C:\Users\masly\file.txt") # handle Windows path
p = Path(r"C:\Users\masly\file.txt").as_posix() # normalize to forward slash
print(p.resolve()) # absolute path
p.exists(); p.is_file(); p.is_dir() # exist; type; type
p.name; p.parent; p.suffix # base name; dir name (full if resolved); extension
here = Path(__file__).parent # script location
home = Path.home() # ~/
import glob
csv_files = glob.glob("root/**/*.csv", recursive=True) # find all CSV in subdirectories of root and get their full paths
csv_files = list(Path("root").rglob("*.csv")) # same thing but Path objects
csv_files = list(Path("root").glob("**/*.csv")) # same thing but Path objects
# Manual Windows Paths handling
windows_path = r"C:\Users\masly\projects\file.txt"
windows_path_fixed = windows_path.replace("\\", "/")
wsl_path = windows_path_fixed.replace("C:/", "/mnt/c/")
windows_path_fixed = wsl_path.replace("/mnt/c/", "C:/")
Environment Variables
Linux/macOS (bash/zsh)
export API_KEY="sk-..." # set in current session (shell/terminal)
echo $API_KEY # read
API_KEY="sk-..." python main.py # only set env var for this command
echo 'export API_KEY="sk-..."' >> ~/.bashrc && source ~/.bashrc # Linux persist in shell cfg
echo 'export API_KEY="sk-..."' >> ~/.zshrc && source ~/.zshrc # macOS persist in shell cfg
Windows
# CMD
set API_KEY=sk-... # set ! no double quotes
echo %API_KEY% # read
setx API_KEY "sk-..." # persist (user-level)
# PowerShell
$env:API_KEY="sk-..." # set
$env:API_KEY # read
[System.Environment]::SetEnvironmentVariable("API_KEY","sk-...","User") # persist (user-level)
Python
import os
import subprocess
if "API_KEY" not in os.environ: print("nu-uh") # check existence
os.environ["API_KEY"] = "sk-..." # set (current process)
value = os.environ.get("API_KEY") # read
subprocess.run(["python", "script.py"]) # subprocess inherits!
# Copy and overwrite before subprocessing
env = os.environ.copy()
env["API_KEY"] = "sk-2..."
subprocess.run(["python", "script.py"], env=env)
Linux
Linux PIDs & Open Files
# List active processes: a = all users; u = user-formatted columns; x = include non-terminal processes (e.g., daemons)
ps aux
# List python programs from current user running in terminals
ps -u $USER u | grep python
pgrep -a -u $USER python
# Kill specific process ID
kill 76531
kill -9 76531 # SIGKILL
# Kill all python apps running from this user
pkill -u $USER python
lsof -i :8081 # list open files on port 8081 -> what is serving
Apt package manager
sudo apt update # update sources (refresh package lists)
sudo apt upgrade # upgrade installed packages
sudo apt install nginx # install package
sudo apt install nginx=1.18.0-0ubuntu1 # install specific package version
sudo apt install ./package.deb # install manually downloaded package
sudo apt reinstall nginx # reinstall package
sudo apt full-upgrade # handle dependencies/removals
apt search nginx # search for specific packages
apt show nginx # package details
apt list --installed
apt list --upgradable
sudo apt remove nginx # keeps config files
sudo apt purge nginx # removes configs too
sudo apt autoremove # autoremove unused dependencies
sudo apt clean # remove all cached .deb files
sudo apt autoclean # remove only obsolete
sudo apt --fix-broken install
sudo nano /etc/apt/sources.list # edit sources
sudo nano /etc/apt/sources.list.d/myrepo.list # add new repo file (example entry: `deb http://archive.ubuntu.com/ubuntu jammy main universe`)
sudo add-apt-repository --remove ppa:deadsnakes/ppa # remove repository: specific one
sudo rm /etc/apt/sources.list.d/myrepo.list # remove repository: delete repository list file
# repo keys & MISC
sudo apt-key list
sudo mkdir -p /etc/apt/keyrings # update repo key
curl -fsSL https://example.com/key.gpg | sudo tee /etc/apt/keyrings/example.gpg > /dev/null # update repo key
deb [signed-by=/etc/apt/keyrings/example.gpg] https://example.com repo main # reference key in source
sudo do-release-upgrade # upgrade Ubuntu to the next release
TODO: systemctl/systemd
Bash shell settings
# ~/.bashrc: executed by bash(1) for non-login shells.
# ...
# HISTORY SETTINGS
# Don't put duplicate lines or lines starting with space in the history.
HISTCONTROL=ignoreboth
# Append to the history file, don't overwrite it
shopt -s histappend
# History size
HISTSIZE=1000
HISTFILESIZE=2000
# Immediate history writes (not on exit)
PROMPT_COMMAND='history -a; history -n'
# PS1 PROMPT (PS1)
# Format: [Time] User@Host:Directory$
# Colors: \[\e[32m\] is Green, \[\e[34m\] is Blue, \[\e[m\] is Reset
PS1="[\[\e[33m\]\t\[\e[m\]] \[\e[32m\]\u@\h\[\e[m\]:\[\e[34m\]\w\[\e[m\]\$ "
Shell text editing
VIM
[sudo] vi file.txt # open file.txt in vim
i # insert mode (type)
Esc # normal mode (command input)
:w # save
:wq or :x or ZZ # save and quit
:q # quit (no save)
:q! or ZQ # force quit (discard changes)
Nano
[sudo] nano file.txt # open file.txt using nano
Ctrl + O # save
Ctrl + X # exit
Replace entire contents:
Ctrl + / -> 1 # go to start of line 1
Ctrl + ^ # set mark
Ctrl + / -> large line (or Ctrl + V) # go to end of last line
Ctrl + K # cut selection
Ctrl + Shift + V # paste system copy buffer
Find executables based on $PATH
# Linux, macOS -- find executable that will run for a command (first match in PATH)
which python # may miss aliases
type python # better
whereis python # find bin + src + man
# Windows CMD -- show all matches of executable in PATH
where python
Linux & MacOS commands
history | grep python # show lines containing "python"
<command> & # run job in background
CTRL + Z # put in background
fg # bring job into foreground
nohup <command> & # ignore hangup command (shell close / ssh disconnect) and start in background
TODO “screen”
- List directory
ls -lahF # -l = long; -a = show hidden; -h = readable sizes; -F = classify (file type)
ls -ltr # list sorted by timestamp in reverse order
ls -ltc # list sorted by change time
ls -lS # list sorted by size
ls -lX # list sorted by extension
ls -1 # short format, one entry per line
# Add alias to shell config
echo 'alias ll="ls -lahF --color=auto"' >> ~/.bashrc && source ~/.bashrc # linux
echo 'alias ll="ls -lah --color=auto"' >> ~/.zshrc && source ~/.zshrc # mac
ls -1A | wc -l # count num files + dirs + hidden
- less, grep, find
- disk free df, disk usage du
- uname,
- create user,
- change password,
- chmod, ch permissions
- .bashrc + create aliases
- File management
- Move
mv file.txt /dest/ # move file, keep fname
mv file.txt newname.txt # move file, rename
mv folder/ /dest/ # move directory
- Copy
cp file.txt /dest/ # copy file, keep fname
cp file.txt /dest/newname.txt # copy file, rename
cp -r folder/ /dest/ # copy dir recursive
cp -a folder/ /dest/ # copy dir, preserve metadata
cp -a /source/folder/*.png /dest/folder/ # copy all PNGs, keep filenames and metadata
- Remove
rm file.txt # remove single file
rm -r ./dirname # remove dir and contents
rm -rf ./dirname # remove dir without questions
- Move
- Redirection operators (>, >>, |)
> # write stdout to file: echo "hello" > file.txt (overwrites if file exists)
>> # append stdout to file: echo "hello" >> file.txt
| # pipe stdout -> stdin: cat file.txt | grep hello
- STDERR (2)
- redirect stderr:
command 2> err.txt - redirect both:
command > all.txt 2>&1 - pipe stdout,stderr -> stdin:
command 2>&1 | grep error
- redirect stderr:
- Frequent combinations
ls -1 > files.txt # save output
echo "log" >> app.log # append log
make > build.log 2>&1 # capture everything
cat file | sort | uniq # chain commands
Windows
winget (Windows package manager)
winget list # list installed packages
winget list --source winget # only list manageable with winget
winget search vscode # search by package name
winget show Microsoft.VisualStudioCode # package details (versions, installer info)
winget install voidtools.Everything # install package via lookup
winget install -e --id WinDirStat.WinDirStat # install specific package via ID
# Must haves
winget install voidtools.Everything WinDirStat.WinDirStat FFmpeg Klocman.BulkCrapUninstaller
winget install VideoLAN.VLC Rufus.Rufus IrfanSkiljan.IrfanView IrfanSkiljan.IrfanView.PlugIns OBSProject.OBSStudio Notepad++.Notepad++
winget upgrade # show packages with available upgrades
winget upgrade WinSCP.WinSCP # upgrade a specific package
winget upgrade --id <Package.ID> # via specific package ID (more precise)
winget upgrade --all # upgrade all packages
winget uninstall --id <Package.ID> # uninstall specific package
winget source list # list available sources (repos)
winget source update # update sources
winget settings # open config file
CMD
dir /a | findstr "sandbox" # list all and pipe to string match (like ls -la | grep sandbox)
dir /a | findstr /i "sAnDboX" # case insensitive
Powershell
Get-ChildItem -Force | Where-Object { $_.Name -match "sandbox" } # list subfiles/subdirs with "sandbox" substring in name
macOS
~/.zshrc
# Prompt customization
PROMPT='%F{242}[%*]%f %F{33}%d%f %# '
Homebrew (MacOS package management)
/bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)" # install
brew update # update brew itself + formulae
brew install wget # install specific package
brew uninstall wget # uninstall specific package
brew reinstall wget # reinstall specific package
brew upgrade wget # upgrade specific package
brew list # list installed
brew leaves # list top-level explicit installs
# cask (GUI apps)
brew install --cask google-chrome
brew list --cask
brew search python
brew info python
brew upgrade # upgrade all installed
brew cleanup # remove old versions
brew doctor # check for issues
brew outdated # list outdated packages
# Brew services
brew services start postgresql
brew services stop postgresql
brew services list
Other & Tools
TODO: regex
find . | grep "\.mp4" # find all files/dirs recursively with .mp4 as substr
ll | grep "\.mp4" # same but just pwd
grep '\.mp4' file.txt # find all lines with .mp4 substr in file.txt
rg "\.mp4$" file.txt # sudo apt install ripgrep, -N to hide linenums
grep -vE '^(\./survey/|\./demo/)' "files_2026-03-18.txt" > files_noextra.txt # keep only lines NOT starting with either of those two prefixes
TODO: ffmpeg
ffprobe --hide-banner input.mp4 # full media info
ffprobe -v error -show_streams input.mp4 # stream info, codecs, bitrate
ffprobe -v error -show_format input.mp4 # format and container info
ffmpeg -i input.mp4 # quick info less structured
ffmpeg -i input.mkv -c:v h264_nvenc -qp 23 -profile:v high -pix_fmt yuv420p -preset:v p5 -rc:v vbr output.mp4 # reencode as mp4 using h264 nvidia encoder
ffmpeg -i URL -c copy output.mp4 # download stream as local file
ffmpeg -i input.mp4 output.wav # extract audio (no resampling)
ffmpeg -i input.mp4 -ac 1 -ar 16000 output.wav # extract audio + convert to mono 16 kHz
ffmpeg -i input.mp4 -ac 1 -ar 16000 -c:a pcm_s16le output.wav # PCM for common ASR format
ffmpeg -ss 00:01:00 -to 00:02:30 -i input.mp4 -c copy output.mp4 # cut video between 1 min and 2 min 30 sec (no reencode)
ffmpeg -ss 00:01:00 -to 00:02:30 -i input.mp4 -c:v libx264 -c:a aac output.mp4 # cut and reencode (slower)
ffmpeg -ss 00:01:00 -t 30 -i input.mp4 -c copy output.mp4 # cut 30 seconds from 1-min timestamp
# burn subtitles onto video
ffmpeg -i "video.mkv" -vf "subtitles='subs.srt'" -c:v libx264 -crf 20 -preset medium -c:a copy video_subs.mkv # CPU
ffmpeg -i "video.mp4" -vf "subtitles='subs.srt'" -c:v h264_nvenc -pix_fmt yuv420p -cq 25 -preset p5 -c:a copy video_subs.mkv # nvidia
ffmpeg -i "video.mkv" -vf "scale=-2:540,subtitles='subs.srt'" -c:v h264_nvenc -pix_fmt yuv420p -cq 25 -preset p5 -c:a copy video_subs.mkv # nvidia + rescale
- TODO: mac-specific convert videos and audio
- TODO: resize image
yt-dlp
python3 -m pip install -U "yt-dlp[default]" # install/upgrade with pip
yt-dlp -F https://www.youtube.com/watch?v=n8X9_MgEdCg # list available formats
yt-dlp -f 244+140 https://www.youtube.com/watch?v=n8X9_MgEdCg # download video format 244 (HD 480p) + audio format 140 (m4a 128kbps) !requires ffmpeg for merging
yt-dlp -o "%(title)s - %(upload_date)s.%(ext)s" "URL" # -o options: %(title)s: video title; %(uploader)s: channel; %(upload_date)s: YYYYMMDD; %(id)s: unique video ID.
yt-dlp -f 244+140 https://www.youtube.com/watch?v=n8X9_MgEdCg --print after_move:filepath # force output path on last line
Satisfy JS requirements
curl -fsSL https://deno.land/install.sh | sh # linux path: ~/.deno/bin/deno
winget install DenoLand.Deno # win path -> C:\Users\masly\.deno\bin\deno.exe
yt-dlp -F URL --js-runtimes "path/to/deno" --remote-components ejs:github # hardcode deno path
yt-dlp -F URL --js-runtimes deno --remote-components ejs:github # if deno path on OS is properly configured
Communication and file transfer
ssh -i ~/.ssh/id_rsa -p PORT user@host
scp -i ~/.ssh/id_rsa -P PORT file.txt user@host:/remote/path/ # upload (local -> remote)
scp -i ~/.ssh/id_rsa -P 2222 user@host:/remote/path/file.txt . # download (remote -> local)
scp -i ~/.ssh/id_rsa -P 2222 -r folder/ user@host:/remote/path/ # recursive copy
# -a = archive: recursive + preserve metadata; -v = verbose; -z = compress during transfer; -P = --partial --progress (keep partial files so re-running resumes, and show progress)
rsync -avz -e "ssh -i ~/.ssh/id_rsa -p 2222" folder/ user@host:/remote/path/ # upload directory
rsync -avz -e "ssh -i ~/.ssh/id_rsa -p 2222" user@host:/remote/path/ folder/ # download directory
rsync -avz --delete -e "ssh -i ~/.ssh/id_rsa -p 2222" folder/ user@host:/remote/path/ # delete extra files
rsync -avz --progress -e "ssh -i ~/.ssh/id_rsa -p 2222" folder/ user@host:/remote/path/ # progress
TODO: FRP (fast reverse proxy)
TODO: netcat, simple ports comms tests
zip
sudo apt update && sudo apt install zip unzip
zip -r assets_backup_date.zip assets/ # zip directory recursively
unzip -l backup.zip # list contents
unzip assets_backup_date.zip -d path-to-assets/ # unzip to specific directory
wget
wget -O ~/Downloads/name.zip https://example.com/remote.zip # download + rename + location
wget -c -O ~/Downloads/name.zip https://example.com/remote.zip # resume download after interrupt
wget -P ./data/raw/ https://example.com/remote.json # keep original filename but save elsewhere
wget --limit-rate=500k https://example.com/remote.zip # limit download speed
wget -b https://example.com/massive-remote.tar.gz # download in background (-b)
tail -f wget-log # check progress
Git
Install, update, set identity
sudo add-apt-repository ppa:git-core/ppa -y && sudo apt update && sudo apt install git -y # Debian/Ubuntu install/update
git update-git-for-windows # update on windows
# plaintext. win: C:/Users/user/.gitconfig. mac/linux: ~/.gitconfig
git config --global user.name "Mykola Maslych"
git config --global user.email "maslychm@gmail.com"
git config --global --edit # open global config file to edit
# to keep email private, set it to private one provided by GitHub
# https://github.com/settings/emails
# https://docs.github.com/en/account-and-profile/how-tos/email-preferences/setting-your-commit-email-address
git config --global user.email XXXXXXXX+username@users.noreply.github.com
Basics
git status
git add . # stage all local changes
git commit -m "commit message" # commit staged changes
git restore --staged <file_name> # unstage file after "git add <file_name>"
git restore --staged . # unstage all staged files
git restore . #! discard unstaged changes
git reset HEAD~1 # undo last local commit, keep changes unstaged
git reset --soft HEAD~1 # undo last local commit, keep changes staged
git reset --hard HEAD~1 # undo last local commit, discard tracked changes
git branch -a # list all branches
git branch -vv # show relationships between local and origin branches
git log # interactive scrollable commit list. "q" to exit
git clean -n # preview untracked changes that will be deleted
git clean -fdx # delete all untracked files (-f), directories (-d), ignored (-x)
git reset --hard HEAD && git clean -fxd # nuke all local tracked and untracked changes
Rewrite commit history
git commit --amend --no-edit # combine staged with last commit (--amend), keep message (--no-edit)
git push origin main --force-with-lease # safe, fails if remote has moved on
git push origin main --force # overwrite remote history with local history
SQUASH last 3 commits into 1
Using
Using soft reset: keep changes from last 3 commits as staged -> commit all changes as 1 commit
git reset --soft HEAD~3
git commit -m "Single commit for the feature"
Using interactive REBASE
git rebase -i HEAD~3 # -> keep "top" one as "pick" and others as "s" -> save -> close.
# Example:
#   pick a1b2c3d Initial feature logic
#   s    e5f6g7h Fix typo in logic
#   s    i9j0k1l Final touches for feature
git push
Merge new branch into main (main stayed as of local ‘feature’ branch creation)
git switch -c feature # + make changes -> commit
git push -u origin feature # --set-upstream (-u) creates link between local feature
git checkout main
git merge feature
git push origin main
Merge new branch into main (main advanced after local ‘feature’ branch creation)
git switch -c feature # + make changes -> commit
git checkout main
git pull origin main
git checkout feature
git merge main
git checkout main
git merge feature
git push origin main
Rebase for linear history: merge new branch into main (main advanced after ‘feature’ branch creation)
## switch -> change -> commit -> push
git checkout main
git pull origin main
git checkout feature
git rebase main
git checkout main
git merge feature
Git terminology
remote - non-local version(s), can be multiple servers
origin - primary remote server.
upstream - source
- With forks: origin is the fork, upstream is the original repo. Typical flow: pull changes from upstream into local, push them to origin (fork), submit pull request to upstream.
- With branches: local
local `main` tracks upstream `origin/main`
Common cases
origin is automatically set with
git clone <url>
Manually set origin for a new local project:
git init
git remote add origin https://github.com/org/project.git
git remote -v # verify, show fetch and push origins
Manually set origin for a new `feature` branch:
git switch -c feature
git push -u origin feature
Manually link a local branch `feature` to the `origin/feature` branch:
git branch --set-upstream-to=origin/feature feature
Change remote URL:
git remote set-url origin https://github.com/org/different-project.git
TODO: change committer info and rewrite history (ex case: wrong email -> no account link)
TODO: configuring keys
SLURM
sbatch jobname.sh # start a job
squeue -u mmaslych # check jobs queue for user
scancel -u mmaslych # cancel all jobs
scancel -t RUNNING -u mmaslych # cancel only running jobs
srun --partition=gpu --gres=gpu:1 --cpus-per-task=4 --mem=16G --pty bash -i # interactive bash shell
sreport cluster UserUtilizationByAccount Users=mmaslych Start=2026-01-01 -t Minutes -T cpu,gres/gpu # check all utilization for user
sreport -T gres/gpu cluster AccountUtilizationByUser mmaslych Start=3/1/26 # check GPU minutes
sreport cluster AccountUtilizationByUser mmaslych Start=3/1/26 # check CPU minutes
latexdiff
brew install latexdiff # macOS install
sudo apt-get install latexdiff # Linux install
latexdiff v1/main.tex v2/main.tex > diff.tex
Highlight changes between PDF versions
More instructions: https://www.overleaf.com/learn/latex/Articles/How_to_use_latexdiff_on_Overleaf
To only show additions in blue, change these lines in the DIF preamble below (removes \uwave from added text, removes #1 from deleted text):
-\providecommand{\DIFaddtex}[1]{{\protect\color{blue}\uwave{#1}}} %DIF PREAMBLE
+\providecommand{\DIFaddtex}[1]{{\protect\color{blue}#1}} %DIF PREAMBLE
-\providecommand{\DIFdeltex}[1]{{\protect\color{red}\sout{#1}}} %DIF PREAMBLE
+\providecommand{\DIFdeltex}[1]{{\protect\color{red}\sout{}}} %DIF PREAMBLE
LLMs, Neural Nets, Applications
Huggingface
https://huggingface.co/docs/huggingface_hub/en/guides/manage-cache
pip install -U huggingface_hub
hf cache ls # list cached models
hf cache rm model/drbaph/OmniVoice-bf16 # remove specific ID
hf cache rm $(hf cache ls --filter "accessed>1y" -q) -y # remove all models not accessed for a year
Gaussian Splatting: WSL + CUDA
- Requirements: WSL + nvidia toolkit 12.6; build colmap; build opensplat
mkdir PROJNAME && cd PROJNAME
mkdir images && mkdir sparse
# Extract frames at 2 FPS (make sure to delete blurry)
ffmpeg -i ~/videos/IMG_0467.MOV -vf "fps=2" -qscale:v 2 images/frame_%04d.jpg
# colmap-style features/points (takes ~30 seconds on RTX4070 for 350 frames)
colmap feature_extractor --database_path database.db --image_path images --SiftExtraction.use_gpu 1
# match points between frames (takes ~4 minutes on RTX 4070 for 350 frames)
colmap exhaustive_matcher --database_path database.db --SiftMatching.use_gpu 1
# exhaustive_matcher -> match every frame with every frame
# sequential_matcher -> match only in order
# vocab_tree_matcher -> use .bin vocab to search for images and only match similar ones with each other
# actual mapping (takes ~15 minutes on RTX 4070 for 350 frames)
colmap mapper --database_path database.db --image_path images --output_path sparse
# train splat (takes anywhere between 20 minutes and 2 hours)
~/OpenSplat/build/opensplat . --colmap-image-path images -d 2 -o PROJNAME.ply -n 30000 -s 5000
~/OpenSplat/build/opensplat . --colmap-image-path images -d 2 -o hec208_d2_controlled.ply -n 45000 --sh-degree 2 --num-downscales 2 --resolution-schedule 2500 --warmup-length 1500 --refine-every 250 --densify-grad-thresh 0.001 --split-screen-size 0.12 --stop-screen-size-at 2000
# TODO splat viewer
llama.cpp
Build latest on Ubuntu with CUDA support
sudo apt install build-essential git cmake libcurl4-openssl-dev -y
sudo apt install nvidia-cuda-toolkit -y
sudo apt install openssl
sudo apt install gcc-10 g++-10 -y
git clone https://github.com/ggerganov/llama.cpp
cd llama.cpp
mkdir build && cd build
export FORCE_CMAKE=1
export CMAKE_ARGS="-DGGML_CUDA=on -DLLAMA_OPENSSL=ON -DCMAKE_CUDA_HOST_COMPILER=/usr/bin/g++-10 -DCMAKE_CXX_STANDARD=17"
cmake ..
cmake --build . --config Release -j $(nproc)
Run models
cd llama.cpp
./build/bin/llama-server -m ~/Downloads/Meta-Llama-3.1-8B-Instruct-Q4_K_M.gguf -ngl 999 --port 8082 --host 0.0.0.0 --ctx-size 16384 -lv 2
./build/bin/llama-server -hf ggml-org/gemma-3n-E4B-it-GGUF
- TODO: continuous batching, prompt caching, async, logprobs
TODO: Ollama
- install on platforms
- set custom port