-
- <div class="md-container" data-md-component="container">
-
-
-
-
-
-
- <main class="md-main" data-md-component="main">
- <div class="md-main__inner md-grid">
-
-
-
- <div class="md-sidebar md-sidebar--primary" data-md-component="sidebar" data-md-type="navigation" >
- <div class="md-sidebar__scrollwrap">
- <div class="md-sidebar__inner">
-
-
-
- <nav class="md-nav md-nav--primary" aria-label="Navigation" data-md-level="0">
- <label class="md-nav__title" for="__drawer">
- <a href="../.." title="LLamaSharp Documentation" class="md-nav__button md-logo" aria-label="LLamaSharp Documentation" data-md-component="logo">
-
-
- <svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24"><path d="M12 8a3 3 0 0 0 3-3 3 3 0 0 0-3-3 3 3 0 0 0-3 3 3 3 0 0 0 3 3m0 3.54C9.64 9.35 6.5 8 3 8v11c3.5 0 6.64 1.35 9 3.54 2.36-2.19 5.5-3.54 9-3.54V8c-3.5 0-6.64 1.35-9 3.54Z"/></svg>
-
- </a>
- LLamaSharp Documentation
- </label>
-
- <ul class="md-nav__list" data-md-scrollfix>
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../.." class="md-nav__link">
- Overview
- </a>
- </li>
-
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../../QuickStart/" class="md-nav__link">
- Quick Start
- </a>
- </li>
-
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../../Architecture/" class="md-nav__link">
- Architecture
- </a>
- </li>
-
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../../FAQ/" class="md-nav__link">
- FAQ
- </a>
- </li>
-
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../../ContributingGuide/" class="md-nav__link">
- Contributing Guide
- </a>
- </li>
-
-
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item md-nav__item--nested">
-
-
-
-
- <input class="md-nav__toggle md-toggle " type="checkbox" id="__nav_6" >
-
-
-
- <label class="md-nav__link" for="__nav_6" id="__nav_6_label" tabindex="0">
- Tutorials
- <span class="md-nav__icon md-icon"></span>
- </label>
-
- <nav class="md-nav" data-md-level="1" aria-labelledby="__nav_6_label" aria-expanded="false">
- <label class="md-nav__title" for="__nav_6">
- <span class="md-nav__icon md-icon"></span>
- Tutorials
- </label>
- <ul class="md-nav__list" data-md-scrollfix>
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../../Tutorials/NativeLibraryConfig/" class="md-nav__link">
- Customize the native library loading
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../../Tutorials/Executors/" class="md-nav__link">
- Use executors
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../../Tutorials/ChatSession/" class="md-nav__link">
- Use ChatSession
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../../Tutorials/UnderstandLLamaContext/" class="md-nav__link">
- Understand LLamaContext
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../../Tutorials/GetEmbeddings/" class="md-nav__link">
- Get embeddings
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../../Tutorials/Quantization/" class="md-nav__link">
- Quantize the model
- </a>
- </li>
-
-
-
-
- </ul>
- </nav>
- </li>
-
-
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item md-nav__item--nested">
-
-
-
-
- <input class="md-nav__toggle md-toggle " type="checkbox" id="__nav_7" >
-
-
-
- <label class="md-nav__link" for="__nav_7" id="__nav_7_label" tabindex="0">
- Integrations
- <span class="md-nav__icon md-icon"></span>
- </label>
-
- <nav class="md-nav" data-md-level="1" aria-labelledby="__nav_7_label" aria-expanded="false">
- <label class="md-nav__title" for="__nav_7">
- <span class="md-nav__icon md-icon"></span>
- Integrations
- </label>
- <ul class="md-nav__list" data-md-scrollfix>
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../../Integrations/semantic-kernel/" class="md-nav__link">
- semantic-kernel integration
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../../Integrations/kernel-memory/" class="md-nav__link">
- kernel-memory integration
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../../Integrations/BotSharp.md" class="md-nav__link">
- BotSharp integration
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../../Integrations/Langchain.md" class="md-nav__link">
- Langchain integration
- </a>
- </li>
-
-
-
-
- </ul>
- </nav>
- </li>
-
-
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item md-nav__item--nested">
-
-
-
-
- <input class="md-nav__toggle md-toggle " type="checkbox" id="__nav_8" >
-
-
-
- <label class="md-nav__link" for="__nav_8" id="__nav_8_label" tabindex="0">
- Examples
- <span class="md-nav__icon md-icon"></span>
- </label>
-
- <nav class="md-nav" data-md-level="1" aria-labelledby="__nav_8_label" aria-expanded="false">
- <label class="md-nav__title" for="__nav_8">
- <span class="md-nav__icon md-icon"></span>
- Examples
- </label>
- <ul class="md-nav__list" data-md-scrollfix>
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../../Examples/BatchedExecutorFork/" class="md-nav__link">
- Bacthed executor - multi-output to one input
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../../Examples/BatchedExecutorGuidance/" class="md-nav__link">
- Batched executor - basic guidance
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../../Examples/BatchedExecutorRewind/" class="md-nav__link">
- Batched executor - rewinding to an earlier state
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../../Examples/ChatChineseGB2312/" class="md-nav__link">
- Chinese LLM - with GB2312 encoding
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../../Examples/ChatSessionStripRoleName/" class="md-nav__link">
- ChatSession - stripping role names
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../../Examples/ChatSessionWithHistory/" class="md-nav__link">
- ChatSession - with history
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../../Examples/ChatSessionWithRestart/" class="md-nav__link">
- ChatSession - restarting
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../../Examples/ChatSessionWithRoleName/" class="md-nav__link">
- ChatSession - Basic
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../../Examples/CodingAssistant/" class="md-nav__link">
- Coding assistant
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../../Examples/GetEmbeddings/" class="md-nav__link">
- Get embeddings
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../../Examples/GrammarJsonResponse/" class="md-nav__link">
- Grammar - json response
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../../Examples/InstructModeExecute/" class="md-nav__link">
- Instruct executor - basic
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../../Examples/InteractiveModeExecute/" class="md-nav__link">
- Interactive executor - basic
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../../Examples/KernelMemory/" class="md-nav__link">
- Kernel memory integration - basic
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../../Examples/KernelMemorySaveAndLoad/" class="md-nav__link">
- Kernel-memory - save & load
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../../Examples/LLavaInteractiveModeExecute/" class="md-nav__link">
- LLaVA - basic
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../../Examples/LoadAndSaveSession/" class="md-nav__link">
- ChatSession - load & save
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../../Examples/LoadAndSaveState/" class="md-nav__link">
- Executor - save/load state
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../../Examples/QuantizeModel/" class="md-nav__link">
- Quantization
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../../Examples/SemanticKernelChat/" class="md-nav__link">
- Semantic-kernel - chat
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../../Examples/SemanticKernelMemory/" class="md-nav__link">
- Semantic-kernel - with kernel-memory
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../../Examples/SemanticKernelPrompt/" class="md-nav__link">
- Semantic-kernel - basic
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../../Examples/StatelessModeExecute/" class="md-nav__link">
- Stateless executor
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../../Examples/TalkToYourself/" class="md-nav__link">
- Talk to yourself
- </a>
- </li>
-
-
-
-
- </ul>
- </nav>
- </li>
-
-
-
-
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item md-nav__item--active md-nav__item--nested">
-
-
-
-
- <input class="md-nav__toggle md-toggle " type="checkbox" id="__nav_9" checked>
-
-
-
- <label class="md-nav__link" for="__nav_9" id="__nav_9_label" tabindex="0">
- API Reference
- <span class="md-nav__icon md-icon"></span>
- </label>
-
- <nav class="md-nav" data-md-level="1" aria-labelledby="__nav_9_label" aria-expanded="true">
- <label class="md-nav__title" for="__nav_9">
- <span class="md-nav__icon md-icon"></span>
- API Reference
- </label>
- <ul class="md-nav__list" data-md-scrollfix>
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../" class="md-nav__link">
- index
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.abstractions.adaptercollection/" class="md-nav__link">
- llama.abstractions.adaptercollection
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.abstractions.icontextparams/" class="md-nav__link">
- llama.abstractions.icontextparams
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.abstractions.ihistorytransform/" class="md-nav__link">
- llama.abstractions.ihistorytransform
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.abstractions.iinferenceparams/" class="md-nav__link">
- llama.abstractions.iinferenceparams
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.abstractions.illamaexecutor/" class="md-nav__link">
- llama.abstractions.illamaexecutor
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.abstractions.illamaparams/" class="md-nav__link">
- llama.abstractions.illamaparams
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.abstractions.imodelparams/" class="md-nav__link">
- llama.abstractions.imodelparams
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.abstractions.itextstreamtransform/" class="md-nav__link">
- llama.abstractions.itextstreamtransform
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.abstractions.itexttransform/" class="md-nav__link">
- llama.abstractions.itexttransform
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.abstractions.loraadapter/" class="md-nav__link">
- llama.abstractions.loraadapter
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.abstractions.metadataoverride/" class="md-nav__link">
- llama.abstractions.metadataoverride
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.abstractions.metadataoverrideconverter/" class="md-nav__link">
- llama.abstractions.metadataoverrideconverter
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.abstractions.tensorsplitscollection/" class="md-nav__link">
- llama.abstractions.tensorsplitscollection
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.abstractions.tensorsplitscollectionconverter/" class="md-nav__link">
- llama.abstractions.tensorsplitscollectionconverter
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.antipromptprocessor/" class="md-nav__link">
- llama.antipromptprocessor
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.batched.alreadypromptedconversationexception/" class="md-nav__link">
- llama.batched.alreadypromptedconversationexception
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.batched.batchedexecutor/" class="md-nav__link">
- llama.batched.batchedexecutor
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.batched.cannotforkwhilerequiresinferenceexception/" class="md-nav__link">
- llama.batched.cannotforkwhilerequiresinferenceexception
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.batched.cannotmodifywhilerequiresinferenceexception/" class="md-nav__link">
- llama.batched.cannotmodifywhilerequiresinferenceexception
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.batched.cannotsamplerequiresinferenceexception/" class="md-nav__link">
- llama.batched.cannotsamplerequiresinferenceexception
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.batched.cannotsamplerequirespromptexception/" class="md-nav__link">
- llama.batched.cannotsamplerequirespromptexception
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.batched.conversation/" class="md-nav__link">
- llama.batched.conversation
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.batched.conversationextensions/" class="md-nav__link">
- llama.batched.conversationextensions
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.batched.experimentalbatchedexecutorexception/" class="md-nav__link">
- llama.batched.experimentalbatchedexecutorexception
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.chatsession-1/" class="md-nav__link">
- llama.chatsession-1
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.chatsession/" class="md-nav__link">
- llama.chatsession
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.common.authorrole/" class="md-nav__link">
- llama.common.authorrole
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.common.chathistory/" class="md-nav__link">
- llama.common.chathistory
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.common.fixedsizequeue-1/" class="md-nav__link">
- llama.common.fixedsizequeue-1
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.common.inferenceparams/" class="md-nav__link">
- llama.common.inferenceparams
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.common.mirostattype/" class="md-nav__link">
- llama.common.mirostattype
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.common.modelparams/" class="md-nav__link">
- llama.common.modelparams
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.exceptions.grammarexpectedname/" class="md-nav__link">
- llama.exceptions.grammarexpectedname
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.exceptions.grammarexpectednext/" class="md-nav__link">
- llama.exceptions.grammarexpectednext
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.exceptions.grammarexpectedprevious/" class="md-nav__link">
- llama.exceptions.grammarexpectedprevious
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.exceptions.grammarformatexception/" class="md-nav__link">
- llama.exceptions.grammarformatexception
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.exceptions.grammarunexpectedcharaltelement/" class="md-nav__link">
- llama.exceptions.grammarunexpectedcharaltelement
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.exceptions.grammarunexpectedcharrngelement/" class="md-nav__link">
- llama.exceptions.grammarunexpectedcharrngelement
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.exceptions.grammarunexpectedendelement/" class="md-nav__link">
- llama.exceptions.grammarunexpectedendelement
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.exceptions.grammarunexpectedendofinput/" class="md-nav__link">
- llama.exceptions.grammarunexpectedendofinput
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.exceptions.grammarunexpectedhexcharscount/" class="md-nav__link">
- llama.exceptions.grammarunexpectedhexcharscount
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.exceptions.grammarunknownescapecharacter/" class="md-nav__link">
- llama.exceptions.grammarunknownescapecharacter
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.exceptions.llamadecodeerror/" class="md-nav__link">
- llama.exceptions.llamadecodeerror
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.exceptions.loadweightsfailedexception/" class="md-nav__link">
- llama.exceptions.loadweightsfailedexception
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.exceptions.runtimeerror/" class="md-nav__link">
- llama.exceptions.runtimeerror
- </a>
- </li>
-
-
-
-
-
-
-
-
-
- <li class="md-nav__item">
- <a href="../llama.extensions.icontextparamsextensions/" class="md-nav__link">
- llama.extensions.icontextparamsextensions
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_6" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_6" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#tryloadlibrariesg__tryfindpath84_1string-c__displayclass84_0" class="md-nav__link">
- <TryLoadLibraries>g__TryFindPath|84_1(String, <>c__DisplayClass84_0&)
- </a>
-
- <nav class="md-nav" aria-label="<TryLoadLibraries>g__TryFindPath|84_1(String, <>c__DisplayClass84_0&)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_7" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_7" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_set_n_threadssafellamacontexthandle-uint32-uint32" class="md-nav__link">
- llama_set_n_threads(SafeLLamaContextHandle, UInt32, UInt32)
- </a>
-
- <nav class="md-nav" aria-label="llama_set_n_threads(SafeLLamaContextHandle, UInt32, UInt32)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_8" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_vocab_typesafellamamodelhandle" class="md-nav__link">
- llama_vocab_type(SafeLlamaModelHandle)
- </a>
-
- <nav class="md-nav" aria-label="llama_vocab_type(SafeLlamaModelHandle)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_9" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_8" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_rope_typesafellamamodelhandle" class="md-nav__link">
- llama_rope_type(SafeLlamaModelHandle)
- </a>
-
- <nav class="md-nav" aria-label="llama_rope_type(SafeLlamaModelHandle)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_10" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_9" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_grammar_initllamagrammarelement-uint64-uint64" class="md-nav__link">
- llama_grammar_init(LLamaGrammarElement, UInt64, UInt64)**
- </a>
-
- <nav class="md-nav" aria-label="llama_grammar_init(LLamaGrammarElement, UInt64, UInt64)**">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_11" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_10" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_grammar_freeintptr" class="md-nav__link">
- llama_grammar_free(IntPtr)
- </a>
-
- <nav class="md-nav" aria-label="llama_grammar_free(IntPtr)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_12" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_grammar_copysafellamagrammarhandle" class="md-nav__link">
- llama_grammar_copy(SafeLLamaGrammarHandle)
- </a>
-
- <nav class="md-nav" aria-label="llama_grammar_copy(SafeLLamaGrammarHandle)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_13" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_11" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_sample_grammarsafellamacontexthandle-llamatokendataarraynative-safellamagrammarhandle" class="md-nav__link">
- llama_sample_grammar(SafeLLamaContextHandle, LLamaTokenDataArrayNative&, SafeLLamaGrammarHandle)
- </a>
-
- <nav class="md-nav" aria-label="llama_sample_grammar(SafeLLamaContextHandle, LLamaTokenDataArrayNative&, SafeLLamaGrammarHandle)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_14" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_grammar_accept_tokensafellamacontexthandle-safellamagrammarhandle-llamatoken" class="md-nav__link">
- llama_grammar_accept_token(SafeLLamaContextHandle, SafeLLamaGrammarHandle, LLamaToken)
- </a>
-
- <nav class="md-nav" aria-label="llama_grammar_accept_token(SafeLLamaContextHandle, SafeLLamaGrammarHandle, LLamaToken)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_15" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llava_validate_embed_sizesafellamacontexthandle-safellavamodelhandle" class="md-nav__link">
- llava_validate_embed_size(SafeLLamaContextHandle, SafeLlavaModelHandle)
- </a>
-
- <nav class="md-nav" aria-label="llava_validate_embed_size(SafeLLamaContextHandle, SafeLlavaModelHandle)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_16" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_12" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llava_image_embed_make_with_bytessafellavamodelhandle-int32-byte-int32" class="md-nav__link">
- llava_image_embed_make_with_bytes(SafeLlavaModelHandle, Int32, Byte[], Int32)
- </a>
-
- <nav class="md-nav" aria-label="llava_image_embed_make_with_bytes(SafeLlavaModelHandle, Int32, Byte[], Int32)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_17" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_13" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llava_image_embed_make_with_filenamesafellavamodelhandle-int32-string" class="md-nav__link">
- llava_image_embed_make_with_filename(SafeLlavaModelHandle, Int32, String)
- </a>
-
- <nav class="md-nav" aria-label="llava_image_embed_make_with_filename(SafeLlavaModelHandle, Int32, String)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_18" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_14" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llava_image_embed_freeintptr" class="md-nav__link">
- llava_image_embed_free(IntPtr)
- </a>
-
- <nav class="md-nav" aria-label="llava_image_embed_free(IntPtr)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_19" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llava_eval_image_embedsafellamacontexthandle-safellavaimageembedhandle-int32-int32" class="md-nav__link">
- llava_eval_image_embed(SafeLLamaContextHandle, SafeLlavaImageEmbedHandle, Int32, Int32&)
- </a>
-
- <nav class="md-nav" aria-label="llava_eval_image_embed(SafeLLamaContextHandle, SafeLlavaImageEmbedHandle, Int32, Int32&)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_20" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_15" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_model_quantizestring-string-llamamodelquantizeparams" class="md-nav__link">
- llama_model_quantize(String, String, LLamaModelQuantizeParams*)
- </a>
-
- <nav class="md-nav" aria-label="llama_model_quantize(String, String, LLamaModelQuantizeParams*)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_21" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_16" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_sample_repetition_penaltiessafellamacontexthandle-llamatokendataarraynative-llamatoken-uint64-single-single-single" class="md-nav__link">
- llama_sample_repetition_penalties(SafeLLamaContextHandle, LLamaTokenDataArrayNative&, LLamaToken*, UInt64, Single, Single, Single)
- </a>
-
- <nav class="md-nav" aria-label="llama_sample_repetition_penalties(SafeLLamaContextHandle, LLamaTokenDataArrayNative&, LLamaToken*, UInt64, Single, Single, Single)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_22" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_sample_apply_guidancesafellamacontexthandle-spansingle-readonlyspansingle-single" class="md-nav__link">
- llama_sample_apply_guidance(SafeLLamaContextHandle, Span<Single>, ReadOnlySpan<Single>, Single)
- </a>
-
- <nav class="md-nav" aria-label="llama_sample_apply_guidance(SafeLLamaContextHandle, Span<Single>, ReadOnlySpan<Single>, Single)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_23" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_sample_apply_guidancesafellamacontexthandle-single-single-single" class="md-nav__link">
- llama_sample_apply_guidance(SafeLLamaContextHandle, Single, Single, Single)
- </a>
-
- <nav class="md-nav" aria-label="llama_sample_apply_guidance(SafeLLamaContextHandle, Single, Single, Single)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_24" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_sample_softmaxsafellamacontexthandle-llamatokendataarraynative" class="md-nav__link">
- llama_sample_softmax(SafeLLamaContextHandle, LLamaTokenDataArrayNative&)
- </a>
-
- <nav class="md-nav" aria-label="llama_sample_softmax(SafeLLamaContextHandle, LLamaTokenDataArrayNative&)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_25" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_sample_top_ksafellamacontexthandle-llamatokendataarraynative-int32-uint64" class="md-nav__link">
- llama_sample_top_k(SafeLLamaContextHandle, LLamaTokenDataArrayNative&, Int32, UInt64)
- </a>
-
- <nav class="md-nav" aria-label="llama_sample_top_k(SafeLLamaContextHandle, LLamaTokenDataArrayNative&, Int32, UInt64)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_26" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_sample_top_psafellamacontexthandle-llamatokendataarraynative-single-uint64" class="md-nav__link">
- llama_sample_top_p(SafeLLamaContextHandle, LLamaTokenDataArrayNative&, Single, UInt64)
- </a>
-
- <nav class="md-nav" aria-label="llama_sample_top_p(SafeLLamaContextHandle, LLamaTokenDataArrayNative&, Single, UInt64)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_27" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_sample_min_psafellamacontexthandle-llamatokendataarraynative-single-uint64" class="md-nav__link">
- llama_sample_min_p(SafeLLamaContextHandle, LLamaTokenDataArrayNative&, Single, UInt64)
- </a>
-
- <nav class="md-nav" aria-label="llama_sample_min_p(SafeLLamaContextHandle, LLamaTokenDataArrayNative&, Single, UInt64)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_28" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_sample_tail_freesafellamacontexthandle-llamatokendataarraynative-single-uint64" class="md-nav__link">
- llama_sample_tail_free(SafeLLamaContextHandle, LLamaTokenDataArrayNative&, Single, UInt64)
- </a>
-
- <nav class="md-nav" aria-label="llama_sample_tail_free(SafeLLamaContextHandle, LLamaTokenDataArrayNative&, Single, UInt64)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_29" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_sample_typicalsafellamacontexthandle-llamatokendataarraynative-single-uint64" class="md-nav__link">
- llama_sample_typical(SafeLLamaContextHandle, LLamaTokenDataArrayNative&, Single, UInt64)
- </a>
-
- <nav class="md-nav" aria-label="llama_sample_typical(SafeLLamaContextHandle, LLamaTokenDataArrayNative&, Single, UInt64)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_30" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_sample_typicalsafellamacontexthandle-llamatokendataarraynative-single-single-single" class="md-nav__link">
- llama_sample_typical(SafeLLamaContextHandle, LLamaTokenDataArrayNative&, Single, Single, Single)
- </a>
-
- <nav class="md-nav" aria-label="llama_sample_typical(SafeLLamaContextHandle, LLamaTokenDataArrayNative&, Single, Single, Single)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_31" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_sample_tempsafellamacontexthandle-llamatokendataarraynative-single" class="md-nav__link">
- llama_sample_temp(SafeLLamaContextHandle, LLamaTokenDataArrayNative&, Single)
- </a>
-
- <nav class="md-nav" aria-label="llama_sample_temp(SafeLLamaContextHandle, LLamaTokenDataArrayNative&, Single)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_32" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_get_embeddingssafellamacontexthandle" class="md-nav__link">
- llama_get_embeddings(SafeLLamaContextHandle)
- </a>
-
- <nav class="md-nav" aria-label="llama_get_embeddings(SafeLLamaContextHandle)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_33" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_17" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_chat_apply_templatesafellamamodelhandle-char-llamachatmessage-intptr-boolean-char-int32" class="md-nav__link">
- llama_chat_apply_template(SafeLlamaModelHandle, Char, LLamaChatMessage, IntPtr, Boolean, Char*, Int32)
- </a>
-
- <nav class="md-nav" aria-label="llama_chat_apply_template(SafeLlamaModelHandle, Char, LLamaChatMessage, IntPtr, Boolean, Char*, Int32)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_34" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_18" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_token_bossafellamamodelhandle" class="md-nav__link">
- llama_token_bos(SafeLlamaModelHandle)
- </a>
-
- <nav class="md-nav" aria-label="llama_token_bos(SafeLlamaModelHandle)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_35" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_19" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_token_eossafellamamodelhandle" class="md-nav__link">
- llama_token_eos(SafeLlamaModelHandle)
- </a>
-
- <nav class="md-nav" aria-label="llama_token_eos(SafeLlamaModelHandle)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_36" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_20" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_token_nlsafellamamodelhandle" class="md-nav__link">
- llama_token_nl(SafeLlamaModelHandle)
- </a>
-
- <nav class="md-nav" aria-label="llama_token_nl(SafeLlamaModelHandle)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_37" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_21" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_add_bos_tokensafellamamodelhandle" class="md-nav__link">
- llama_add_bos_token(SafeLlamaModelHandle)
- </a>
-
- <nav class="md-nav" aria-label="llama_add_bos_token(SafeLlamaModelHandle)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_38" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_22" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_add_eos_tokensafellamamodelhandle" class="md-nav__link">
- llama_add_eos_token(SafeLlamaModelHandle)
- </a>
-
- <nav class="md-nav" aria-label="llama_add_eos_token(SafeLlamaModelHandle)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_39" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_23" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_token_prefixsafellamamodelhandle" class="md-nav__link">
- llama_token_prefix(SafeLlamaModelHandle)
- </a>
-
- <nav class="md-nav" aria-label="llama_token_prefix(SafeLlamaModelHandle)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_40" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_24" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_token_middlesafellamamodelhandle" class="md-nav__link">
- llama_token_middle(SafeLlamaModelHandle)
- </a>
-
- <nav class="md-nav" aria-label="llama_token_middle(SafeLlamaModelHandle)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_41" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_25" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_token_suffixsafellamamodelhandle" class="md-nav__link">
- llama_token_suffix(SafeLlamaModelHandle)
- </a>
-
- <nav class="md-nav" aria-label="llama_token_suffix(SafeLlamaModelHandle)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_42" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_26" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_token_eotsafellamamodelhandle" class="md-nav__link">
- llama_token_eot(SafeLlamaModelHandle)
- </a>
-
- <nav class="md-nav" aria-label="llama_token_eot(SafeLlamaModelHandle)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_43" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_27" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_print_timingssafellamacontexthandle" class="md-nav__link">
- llama_print_timings(SafeLLamaContextHandle)
- </a>
-
- <nav class="md-nav" aria-label="llama_print_timings(SafeLLamaContextHandle)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_44" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_reset_timingssafellamacontexthandle" class="md-nav__link">
- llama_reset_timings(SafeLLamaContextHandle)
- </a>
-
- <nav class="md-nav" aria-label="llama_reset_timings(SafeLLamaContextHandle)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_45" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_print_system_info" class="md-nav__link">
- llama_print_system_info()
- </a>
-
- <nav class="md-nav" aria-label="llama_print_system_info()">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#returns_28" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_token_to_piecesafellamamodelhandle-llamatoken-spanbyte" class="md-nav__link">
- llama_token_to_piece(SafeLlamaModelHandle, LLamaToken, Span<Byte>)
- </a>
-
- <nav class="md-nav" aria-label="llama_token_to_piece(SafeLlamaModelHandle, LLamaToken, Span<Byte>)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_46" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_29" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_tokenizesafellamamodelhandle-byte-int32-llamatoken-int32-boolean-boolean" class="md-nav__link">
- llama_tokenize(SafeLlamaModelHandle, Byte, Int32, LLamaToken, Int32, Boolean, Boolean)
- </a>
-
- <nav class="md-nav" aria-label="llama_tokenize(SafeLlamaModelHandle, Byte, Int32, LLamaToken, Int32, Boolean, Boolean)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_47" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_30" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_log_setllamalogcallback" class="md-nav__link">
- llama_log_set(LLamaLogCallback)
- </a>
-
- <nav class="md-nav" aria-label="llama_log_set(LLamaLogCallback)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_48" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_kv_cache_clearsafellamacontexthandle" class="md-nav__link">
- llama_kv_cache_clear(SafeLLamaContextHandle)
- </a>
-
- <nav class="md-nav" aria-label="llama_kv_cache_clear(SafeLLamaContextHandle)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_49" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_kv_cache_seq_rmsafellamacontexthandle-llamaseqid-llamapos-llamapos" class="md-nav__link">
- llama_kv_cache_seq_rm(SafeLLamaContextHandle, LLamaSeqId, LLamaPos, LLamaPos)
- </a>
-
- <nav class="md-nav" aria-label="llama_kv_cache_seq_rm(SafeLLamaContextHandle, LLamaSeqId, LLamaPos, LLamaPos)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_50" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_kv_cache_seq_cpsafellamacontexthandle-llamaseqid-llamaseqid-llamapos-llamapos" class="md-nav__link">
- llama_kv_cache_seq_cp(SafeLLamaContextHandle, LLamaSeqId, LLamaSeqId, LLamaPos, LLamaPos)
- </a>
-
- <nav class="md-nav" aria-label="llama_kv_cache_seq_cp(SafeLLamaContextHandle, LLamaSeqId, LLamaSeqId, LLamaPos, LLamaPos)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_51" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_kv_cache_seq_keepsafellamacontexthandle-llamaseqid" class="md-nav__link">
- llama_kv_cache_seq_keep(SafeLLamaContextHandle, LLamaSeqId)
- </a>
-
- <nav class="md-nav" aria-label="llama_kv_cache_seq_keep(SafeLLamaContextHandle, LLamaSeqId)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_52" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_kv_cache_seq_addsafellamacontexthandle-llamaseqid-llamapos-llamapos-int32" class="md-nav__link">
- llama_kv_cache_seq_add(SafeLLamaContextHandle, LLamaSeqId, LLamaPos, LLamaPos, Int32)
- </a>
-
- <nav class="md-nav" aria-label="llama_kv_cache_seq_add(SafeLLamaContextHandle, LLamaSeqId, LLamaPos, LLamaPos, Int32)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_53" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_kv_cache_seq_divsafellamacontexthandle-llamaseqid-llamapos-llamapos-int32" class="md-nav__link">
- llama_kv_cache_seq_div(SafeLLamaContextHandle, LLamaSeqId, LLamaPos, LLamaPos, Int32)
- </a>
-
- <nav class="md-nav" aria-label="llama_kv_cache_seq_div(SafeLLamaContextHandle, LLamaSeqId, LLamaPos, LLamaPos, Int32)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_54" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_kv_cache_seq_pos_maxsafellamacontexthandle-llamaseqid" class="md-nav__link">
- llama_kv_cache_seq_pos_max(SafeLLamaContextHandle, LLamaSeqId)
- </a>
-
- <nav class="md-nav" aria-label="llama_kv_cache_seq_pos_max(SafeLLamaContextHandle, LLamaSeqId)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_55" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_31" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_kv_cache_defragsafellamacontexthandle" class="md-nav__link">
- llama_kv_cache_defrag(SafeLLamaContextHandle)
- </a>
-
- <nav class="md-nav" aria-label="llama_kv_cache_defrag(SafeLLamaContextHandle)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_56" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_32" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_kv_cache_updatesafellamacontexthandle" class="md-nav__link">
- llama_kv_cache_update(SafeLLamaContextHandle)
- </a>
-
- <nav class="md-nav" aria-label="llama_kv_cache_update(SafeLLamaContextHandle)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_57" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_batch_initint32-int32-int32" class="md-nav__link">
- llama_batch_init(Int32, Int32, Int32)
- </a>
-
- <nav class="md-nav" aria-label="llama_batch_init(Int32, Int32, Int32)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_58" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_33" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_batch_freellamanativebatch" class="md-nav__link">
- llama_batch_free(LLamaNativeBatch)
- </a>
-
- <nav class="md-nav" aria-label="llama_batch_free(LLamaNativeBatch)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_59" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_decodesafellamacontexthandle-llamanativebatch" class="md-nav__link">
- llama_decode(SafeLLamaContextHandle, LLamaNativeBatch)
- </a>
-
- <nav class="md-nav" aria-label="llama_decode(SafeLLamaContextHandle, LLamaNativeBatch)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_60" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_34" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_kv_cache_view_initsafellamacontexthandle-int32" class="md-nav__link">
- llama_kv_cache_view_init(SafeLLamaContextHandle, Int32)
- </a>
-
- <nav class="md-nav" aria-label="llama_kv_cache_view_init(SafeLLamaContextHandle, Int32)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_61" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_35" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_kv_cache_view_freellamakvcacheview" class="md-nav__link">
- llama_kv_cache_view_free(LLamaKvCacheView&)
- </a>
-
- <nav class="md-nav" aria-label="llama_kv_cache_view_free(LLamaKvCacheView&)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_62" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_kv_cache_view_updatesafellamacontexthandle-llamakvcacheview" class="md-nav__link">
- llama_kv_cache_view_update(SafeLLamaContextHandle, LLamaKvCacheView&)
- </a>
-
- <nav class="md-nav" aria-label="llama_kv_cache_view_update(SafeLLamaContextHandle, LLamaKvCacheView&)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_63" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_get_kv_cache_token_countsafellamacontexthandle" class="md-nav__link">
- llama_get_kv_cache_token_count(SafeLLamaContextHandle)
- </a>
-
- <nav class="md-nav" aria-label="llama_get_kv_cache_token_count(SafeLLamaContextHandle)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_64" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_36" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_get_kv_cache_used_cellssafellamacontexthandle" class="md-nav__link">
- llama_get_kv_cache_used_cells(SafeLLamaContextHandle)
- </a>
-
- <nav class="md-nav" aria-label="llama_get_kv_cache_used_cells(SafeLLamaContextHandle)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_65" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_37" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_beam_searchsafellamacontexthandle-llamabeamsearchcallback-intptr-uint64-int32-int32-int32" class="md-nav__link">
- llama_beam_search(SafeLLamaContextHandle, LLamaBeamSearchCallback, IntPtr, UInt64, Int32, Int32, Int32)
- </a>
-
- <nav class="md-nav" aria-label="llama_beam_search(SafeLLamaContextHandle, LLamaBeamSearchCallback, IntPtr, UInt64, Int32, Int32, Int32)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_66" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_empty_call" class="md-nav__link">
- llama_empty_call()
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_max_devices" class="md-nav__link">
- llama_max_devices()
- </a>
-
- <nav class="md-nav" aria-label="llama_max_devices()">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#returns_38" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_model_default_params" class="md-nav__link">
- llama_model_default_params()
- </a>
-
- <nav class="md-nav" aria-label="llama_model_default_params()">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#returns_39" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_context_default_params" class="md-nav__link">
- llama_context_default_params()
- </a>
-
- <nav class="md-nav" aria-label="llama_context_default_params()">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#returns_40" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_model_quantize_default_params" class="md-nav__link">
- llama_model_quantize_default_params()
- </a>
-
- <nav class="md-nav" aria-label="llama_model_quantize_default_params()">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#returns_41" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_supports_mmap" class="md-nav__link">
- llama_supports_mmap()
- </a>
-
- <nav class="md-nav" aria-label="llama_supports_mmap()">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#returns_42" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_supports_mlock" class="md-nav__link">
- llama_supports_mlock()
- </a>
-
- <nav class="md-nav" aria-label="llama_supports_mlock()">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#returns_43" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_supports_gpu_offload" class="md-nav__link">
- llama_supports_gpu_offload()
- </a>
-
- <nav class="md-nav" aria-label="llama_supports_gpu_offload()">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#returns_44" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_set_rng_seedsafellamacontexthandle-uint32" class="md-nav__link">
- llama_set_rng_seed(SafeLLamaContextHandle, UInt32)
- </a>
-
- <nav class="md-nav" aria-label="llama_set_rng_seed(SafeLLamaContextHandle, UInt32)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_67" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_get_state_sizesafellamacontexthandle" class="md-nav__link">
- llama_get_state_size(SafeLLamaContextHandle)
- </a>
-
- <nav class="md-nav" aria-label="llama_get_state_size(SafeLLamaContextHandle)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_68" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_45" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_copy_state_datasafellamacontexthandle-byte" class="md-nav__link">
- llama_copy_state_data(SafeLLamaContextHandle, Byte*)
- </a>
-
- <nav class="md-nav" aria-label="llama_copy_state_data(SafeLLamaContextHandle, Byte*)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_69" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_46" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_set_state_datasafellamacontexthandle-byte" class="md-nav__link">
- llama_set_state_data(SafeLLamaContextHandle, Byte*)
- </a>
-
- <nav class="md-nav" aria-label="llama_set_state_data(SafeLLamaContextHandle, Byte*)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_70" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_47" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_load_session_filesafellamacontexthandle-string-llamatoken-uint64-uint64" class="md-nav__link">
- llama_load_session_file(SafeLLamaContextHandle, String, LLamaToken[], UInt64, UInt64&)
- </a>
-
- <nav class="md-nav" aria-label="llama_load_session_file(SafeLLamaContextHandle, String, LLamaToken[], UInt64, UInt64&)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_71" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_48" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_save_session_filesafellamacontexthandle-string-llamatoken-uint64" class="md-nav__link">
- llama_save_session_file(SafeLLamaContextHandle, String, LLamaToken[], UInt64)
- </a>
-
- <nav class="md-nav" aria-label="llama_save_session_file(SafeLLamaContextHandle, String, LLamaToken[], UInt64)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_72" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_49" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_token_get_textsafellamamodelhandle-llamatoken" class="md-nav__link">
- llama_token_get_text(SafeLlamaModelHandle, LLamaToken)
- </a>
-
- <nav class="md-nav" aria-label="llama_token_get_text(SafeLlamaModelHandle, LLamaToken)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_73" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_50" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_token_get_scoresafellamamodelhandle-llamatoken" class="md-nav__link">
- llama_token_get_score(SafeLlamaModelHandle, LLamaToken)
- </a>
-
- <nav class="md-nav" aria-label="llama_token_get_score(SafeLlamaModelHandle, LLamaToken)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_74" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_51" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_token_get_typesafellamamodelhandle-llamatoken" class="md-nav__link">
- llama_token_get_type(SafeLlamaModelHandle, LLamaToken)
- </a>
-
- <nav class="md-nav" aria-label="llama_token_get_type(SafeLlamaModelHandle, LLamaToken)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_75" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_52" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_n_ctxsafellamacontexthandle" class="md-nav__link">
- llama_n_ctx(SafeLLamaContextHandle)
- </a>
-
- <nav class="md-nav" aria-label="llama_n_ctx(SafeLLamaContextHandle)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_76" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_53" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_n_batchsafellamacontexthandle" class="md-nav__link">
- llama_n_batch(SafeLLamaContextHandle)
- </a>
-
- <nav class="md-nav" aria-label="llama_n_batch(SafeLLamaContextHandle)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_77" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_54" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_get_logitssafellamacontexthandle" class="md-nav__link">
- llama_get_logits(SafeLLamaContextHandle)
- </a>
-
- <nav class="md-nav" aria-label="llama_get_logits(SafeLLamaContextHandle)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_78" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_55" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_get_logits_ithsafellamacontexthandle-int32" class="md-nav__link">
- llama_get_logits_ith(SafeLLamaContextHandle, Int32)
- </a>
-
- <nav class="md-nav" aria-label="llama_get_logits_ith(SafeLLamaContextHandle, Int32)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_79" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_56" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#llama_get_embeddings_ithsafellamacontexthandle-int32" class="md-nav__link">
- llama_get_embeddings_ith(SafeLLamaContextHandle, Int32)
- </a>
-
- <nav class="md-nav" aria-label="llama_get_embeddings_ith(SafeLLamaContextHandle, Int32)">
- <ul class="md-nav__list">
-
- <li class="md-nav__item">
- <a href="#parameters_80" class="md-nav__link">
- Parameters
- </a>
-
- </li>
-
- <li class="md-nav__item">
- <a href="#returns_57" class="md-nav__link">
- Returns
- </a>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- </ul>
- </nav>
-
- </li>
-
- </ul>
-
- </nav>
- </div>
- </div>
- </div>
-
-
-
- <div class="md-content" data-md-component="content">
- <article class="md-content__inner md-typeset">
-
-
-
-
- <h1 id="nativeapi">NativeApi</h1>
- <p>Namespace: LLama.Native</p>
- <p>Direct translation of the llama.cpp API</p>
- <pre><code class="language-csharp">public static class NativeApi
- </code></pre>
- <p>Inheritance <a href="https://docs.microsoft.com/en-us/dotnet/api/system.object">Object</a> → <a href="./">NativeApi</a></p>
- <h2 id="methods">Methods</h2>
- <h3 id="llama_sample_token_mirostatsafellamacontexthandle-llamatokendataarraynative-single-single-int32-single"><strong>llama_sample_token_mirostat(SafeLLamaContextHandle, LLamaTokenDataArrayNative&, Single, Single, Int32, Single&)</strong></h3>
- <p>Mirostat 1.0 algorithm described in the paper https://arxiv.org/abs/2007.14966. Uses tokens instead of words.</p>
- <pre><code class="language-csharp">public static LLamaToken llama_sample_token_mirostat(SafeLLamaContextHandle ctx, LLamaTokenDataArrayNative& candidates, float tau, float eta, int m, Single& mu)
- </code></pre>
- <h4 id="parameters">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <p><code>candidates</code> <a href="./llama.native.llamatokendataarraynative&.md">LLamaTokenDataArrayNative&</a><br>
- A vector of <code>llama_token_data</code> containing the candidate tokens, their probabilities (p), and log-odds (logit) for the current position in the generated text.</p>
- <p><code>tau</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.single">Single</a><br>
- The target cross-entropy (or surprise) value you want to achieve for the generated text. A higher value corresponds to more surprising or less predictable text, while a lower value corresponds to less surprising or more predictable text.</p>
- <p><code>eta</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.single">Single</a><br>
- The learning rate used to update <code>mu</code> based on the error between the target and observed surprisal of the sampled word. A larger learning rate will cause <code>mu</code> to be updated more quickly, while a smaller learning rate will result in slower updates.</p>
- <p><code>m</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.int32">Int32</a><br>
- The number of tokens considered in the estimation of <code>s_hat</code>. This is an arbitrary value that is used to calculate <code>s_hat</code>, which in turn helps to calculate the value of <code>k</code>. In the paper, they use <code>m = 100</code>, but you can experiment with different values to see how it affects the performance of the algorithm.</p>
- <p><code>mu</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.single&">Single&</a><br>
- Maximum cross-entropy. This value is initialized to be twice the target cross-entropy (<code>2 * tau</code>) and is updated in the algorithm based on the error between the target and observed surprisal.</p>
- <h4 id="returns">Returns</h4>
- <p><a href="../llama.native.llamatoken/">LLamaToken</a><br></p>
- <h3 id="llama_sample_token_mirostat_v2safellamacontexthandle-llamatokendataarraynative-single-single-single"><strong>llama_sample_token_mirostat_v2(SafeLLamaContextHandle, LLamaTokenDataArrayNative&, Single, Single, Single&)</strong></h3>
- <p>Mirostat 2.0 algorithm described in the paper https://arxiv.org/abs/2007.14966. Uses tokens instead of words.</p>
- <pre><code class="language-csharp">public static LLamaToken llama_sample_token_mirostat_v2(SafeLLamaContextHandle ctx, LLamaTokenDataArrayNative& candidates, float tau, float eta, Single& mu)
- </code></pre>
- <h4 id="parameters_1">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <p><code>candidates</code> <a href="./llama.native.llamatokendataarraynative&.md">LLamaTokenDataArrayNative&</a><br>
- A vector of <code>llama_token_data</code> containing the candidate tokens, their probabilities (p), and log-odds (logit) for the current position in the generated text.</p>
- <p><code>tau</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.single">Single</a><br>
- The target cross-entropy (or surprise) value you want to achieve for the generated text. A higher value corresponds to more surprising or less predictable text, while a lower value corresponds to less surprising or more predictable text.</p>
- <p><code>eta</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.single">Single</a><br>
- The learning rate used to update <code>mu</code> based on the error between the target and observed surprisal of the sampled word. A larger learning rate will cause <code>mu</code> to be updated more quickly, while a smaller learning rate will result in slower updates.</p>
- <p><code>mu</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.single&">Single&</a><br>
- Maximum cross-entropy. This value is initialized to be twice the target cross-entropy (<code>2 * tau</code>) and is updated in the algorithm based on the error between the target and observed surprisal.</p>
- <h4 id="returns_1">Returns</h4>
- <p><a href="../llama.native.llamatoken/">LLamaToken</a><br></p>
- <h3 id="llama_sample_token_greedysafellamacontexthandle-llamatokendataarraynative"><strong>llama_sample_token_greedy(SafeLLamaContextHandle, LLamaTokenDataArrayNative&)</strong></h3>
- <p>Selects the token with the highest probability.</p>
- <pre><code class="language-csharp">public static LLamaToken llama_sample_token_greedy(SafeLLamaContextHandle ctx, LLamaTokenDataArrayNative& candidates)
- </code></pre>
- <h4 id="parameters_2">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <p><code>candidates</code> <a href="./llama.native.llamatokendataarraynative&.md">LLamaTokenDataArrayNative&</a><br>
- Pointer to LLamaTokenDataArray</p>
- <h4 id="returns_2">Returns</h4>
- <p><a href="../llama.native.llamatoken/">LLamaToken</a><br></p>
- <h3 id="llama_sample_tokensafellamacontexthandle-llamatokendataarraynative"><strong>llama_sample_token(SafeLLamaContextHandle, LLamaTokenDataArrayNative&)</strong></h3>
- <p>Randomly selects a token from the candidates based on their probabilities.</p>
- <pre><code class="language-csharp">public static LLamaToken llama_sample_token(SafeLLamaContextHandle ctx, LLamaTokenDataArrayNative& candidates)
- </code></pre>
- <h4 id="parameters_3">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <p><code>candidates</code> <a href="./llama.native.llamatokendataarraynative&.md">LLamaTokenDataArrayNative&</a><br>
- Pointer to LLamaTokenDataArray</p>
- <h4 id="returns_3">Returns</h4>
- <p><a href="../llama.native.llamatoken/">LLamaToken</a><br></p>
- <h3 id="llama_get_embeddingsg__llama_get_embeddings_native30_0safellamacontexthandle"><strong><llama_get_embeddings>g__llama_get_embeddings_native|30_0(SafeLLamaContextHandle)</strong></h3>
- <pre><code class="language-csharp">internal static Single* <llama_get_embeddings>g__llama_get_embeddings_native|30_0(SafeLLamaContextHandle ctx)
- </code></pre>
- <h4 id="parameters_4">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <h4 id="returns_4">Returns</h4>
- <p><a href="https://docs.microsoft.com/en-us/dotnet/api/system.single*">Single*</a><br></p>
- <h3 id="llama_token_to_pieceg__llama_token_to_piece_native44_0safellamamodelhandle-llamatoken-byte-int32"><strong><llama_token_to_piece>g__llama_token_to_piece_native|44_0(SafeLlamaModelHandle, LLamaToken, Byte*, Int32)</strong></h3>
- <pre><code class="language-csharp">internal static int <llama_token_to_piece>g__llama_token_to_piece_native|44_0(SafeLlamaModelHandle model, LLamaToken llamaToken, Byte* buffer, int length)
- </code></pre>
- <h4 id="parameters_5">Parameters</h4>
- <p><code>model</code> <a href="../llama.native.safellamamodelhandle/">SafeLlamaModelHandle</a><br></p>
- <p><code>llamaToken</code> <a href="../llama.native.llamatoken/">LLamaToken</a><br></p>
- <p><code>buffer</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.byte*">Byte*</a><br></p>
- <p><code>length</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.int32">Int32</a><br></p>
- <h4 id="returns_5">Returns</h4>
- <p><a href="https://docs.microsoft.com/en-us/dotnet/api/system.int32">Int32</a><br></p>
- <h3 id="tryloadlibrariesg__tryload84_0string"><strong><TryLoadLibraries>g__TryLoad|84_0(String)</strong></h3>
- <pre><code class="language-csharp">internal static IntPtr <TryLoadLibraries>g__TryLoad|84_0(string path)
- </code></pre>
- <h4 id="parameters_6">Parameters</h4>
- <p><code>path</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.string">String</a><br></p>
- <h4 id="returns_6">Returns</h4>
- <p><a href="https://docs.microsoft.com/en-us/dotnet/api/system.intptr">IntPtr</a><br></p>
- <h3 id="tryloadlibrariesg__tryfindpath84_1string-c__displayclass84_0"><strong><TryLoadLibraries>g__TryFindPath|84_1(String, <>c__DisplayClass84_0&)</strong></h3>
- <pre><code class="language-csharp">internal static string <TryLoadLibraries>g__TryFindPath|84_1(string filename, <>c__DisplayClass84_0& )
- </code></pre>
- <h4 id="parameters_7">Parameters</h4>
- <p><code>filename</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.string">String</a><br></p>
- <p>`` <a href="./llama.native.nativeapi.<>c__displayclass84_0&.md"><>c__DisplayClass84_0&</a><br></p>
- <h4 id="returns_7">Returns</h4>
- <p><a href="https://docs.microsoft.com/en-us/dotnet/api/system.string">String</a><br></p>
- <h3 id="llama_set_n_threadssafellamacontexthandle-uint32-uint32"><strong>llama_set_n_threads(SafeLLamaContextHandle, UInt32, UInt32)</strong></h3>
- <p>Set the number of threads used for decoding</p>
- <pre><code class="language-csharp">public static void llama_set_n_threads(SafeLLamaContextHandle ctx, uint n_threads, uint n_threads_batch)
- </code></pre>
- <h4 id="parameters_8">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <p><code>n_threads</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.uint32">UInt32</a><br>
- The number of threads used for generation of a single token.</p>
- <p><code>n_threads_batch</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.uint32">UInt32</a><br>
- The number of threads used for prompt and batch processing (multiple tokens at once).</p>
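- <p>For example, single-token generation and batch prompt processing can be given different thread budgets; the numbers below are illustrative only:</p>
- <pre><code class="language-csharp">// Illustrative values: 8 threads for single-token generation,
- // 16 threads for prompt/batch processing.
- NativeApi.llama_set_n_threads(ctx, n_threads: 8, n_threads_batch: 16);
- </code></pre>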
- <h3 id="llama_vocab_typesafellamamodelhandle"><strong>llama_vocab_type(SafeLlamaModelHandle)</strong></h3>
- <pre><code class="language-csharp">public static LLamaVocabType llama_vocab_type(SafeLlamaModelHandle model)
- </code></pre>
- <h4 id="parameters_9">Parameters</h4>
- <p><code>model</code> <a href="../llama.native.safellamamodelhandle/">SafeLlamaModelHandle</a><br></p>
- <h4 id="returns_8">Returns</h4>
- <p><a href="../llama.native.llamavocabtype/">LLamaVocabType</a><br></p>
- <h3 id="llama_rope_typesafellamamodelhandle"><strong>llama_rope_type(SafeLlamaModelHandle)</strong></h3>
- <pre><code class="language-csharp">public static LLamaRopeType llama_rope_type(SafeLlamaModelHandle model)
- </code></pre>
- <h4 id="parameters_10">Parameters</h4>
- <p><code>model</code> <a href="../llama.native.safellamamodelhandle/">SafeLlamaModelHandle</a><br></p>
- <h4 id="returns_9">Returns</h4>
- <p><a href="../llama.native.llamaropetype/">LLamaRopeType</a><br></p>
- <h3 id="llama_grammar_initllamagrammarelement-uint64-uint64"><strong>llama_grammar_init(LLamaGrammarElement</strong>, UInt64, UInt64)**</h3>
- <p>Create a new grammar from the given set of grammar rules</p>
- <pre><code class="language-csharp">public static IntPtr llama_grammar_init(LLamaGrammarElement** rules, ulong n_rules, ulong start_rule_index)
- </code></pre>
- <h4 id="parameters_11">Parameters</h4>
- <p><code>rules</code> <a href="./llama.native.llamagrammarelement**.md">LLamaGrammarElement**</a><br></p>
- <p><code>n_rules</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.uint64">UInt64</a><br></p>
- <p><code>start_rule_index</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.uint64">UInt64</a><br></p>
- <h4 id="returns_10">Returns</h4>
- <p><a href="https://docs.microsoft.com/en-us/dotnet/api/system.intptr">IntPtr</a><br></p>
- <h3 id="llama_grammar_freeintptr"><strong>llama_grammar_free(IntPtr)</strong></h3>
- <p>Free all memory from the given SafeLLamaGrammarHandle</p>
- <pre><code class="language-csharp">public static void llama_grammar_free(IntPtr grammar)
- </code></pre>
- <h4 id="parameters_12">Parameters</h4>
- <p><code>grammar</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.intptr">IntPtr</a><br></p>
- <h3 id="llama_grammar_copysafellamagrammarhandle"><strong>llama_grammar_copy(SafeLLamaGrammarHandle)</strong></h3>
- <p>Create a copy of an existing grammar instance</p>
- <pre><code class="language-csharp">public static IntPtr llama_grammar_copy(SafeLLamaGrammarHandle grammar)
- </code></pre>
- <h4 id="parameters_13">Parameters</h4>
- <p><code>grammar</code> <a href="../llama.native.safellamagrammarhandle/">SafeLLamaGrammarHandle</a><br></p>
- <h4 id="returns_11">Returns</h4>
- <p><a href="https://docs.microsoft.com/en-us/dotnet/api/system.intptr">IntPtr</a><br></p>
- <h3 id="llama_sample_grammarsafellamacontexthandle-llamatokendataarraynative-safellamagrammarhandle"><strong>llama_sample_grammar(SafeLLamaContextHandle, LLamaTokenDataArrayNative&, SafeLLamaGrammarHandle)</strong></h3>
- <p>Apply constraints from grammar</p>
- <pre><code class="language-csharp">public static void llama_sample_grammar(SafeLLamaContextHandle ctx, LLamaTokenDataArrayNative& candidates, SafeLLamaGrammarHandle grammar)
- </code></pre>
- <h4 id="parameters_14">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <p><code>candidates</code> <a href="./llama.native.llamatokendataarraynative&.md">LLamaTokenDataArrayNative&</a><br></p>
- <p><code>grammar</code> <a href="../llama.native.safellamagrammarhandle/">SafeLLamaGrammarHandle</a><br></p>
- <h3 id="llama_grammar_accept_tokensafellamacontexthandle-safellamagrammarhandle-llamatoken"><strong>llama_grammar_accept_token(SafeLLamaContextHandle, SafeLLamaGrammarHandle, LLamaToken)</strong></h3>
- <p>Accepts the sampled token into the grammar</p>
- <pre><code class="language-csharp">public static void llama_grammar_accept_token(SafeLLamaContextHandle ctx, SafeLLamaGrammarHandle grammar, LLamaToken token)
- </code></pre>
- <h4 id="parameters_15">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <p><code>grammar</code> <a href="../llama.native.safellamagrammarhandle/">SafeLLamaGrammarHandle</a><br></p>
- <p><code>token</code> <a href="../llama.native.llamatoken/">LLamaToken</a><br></p>
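- <p>A hedged sketch of how the grammar calls combine in one sampling step: constrain the candidates, pick a token with any sampling strategy (the <code>SampleFrom</code> helper below is hypothetical), then inform the grammar of the choice. <code>ctx</code>, <code>grammar</code> and <code>candidates</code> are assumed to exist already.</p>
- <pre><code class="language-csharp">// 1. Mask out candidates that would violate the grammar.
- NativeApi.llama_sample_grammar(ctx, ref candidates, grammar);
- // 2. Pick a token from the remaining candidates (hypothetical helper).
- LLamaToken token = SampleFrom(ref candidates);
- // 3. Advance the grammar state with the accepted token.
- NativeApi.llama_grammar_accept_token(ctx, grammar, token);
- </code></pre>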
- <h3 id="llava_validate_embed_sizesafellamacontexthandle-safellavamodelhandle"><strong>llava_validate_embed_size(SafeLLamaContextHandle, SafeLlavaModelHandle)</strong></h3>
- <p>Sanity check that the clip and llava embedding sizes match</p>
- <pre><code class="language-csharp">public static bool llava_validate_embed_size(SafeLLamaContextHandle ctxLlama, SafeLlavaModelHandle ctxClip)
- </code></pre>
- <h4 id="parameters_16">Parameters</h4>
- <p><code>ctxLlama</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br>
- LLama Context</p>
- <p><code>ctxClip</code> <a href="../llama.native.safellavamodelhandle/">SafeLlavaModelHandle</a><br>
- Llava Model</p>
- <h4 id="returns_12">Returns</h4>
- <p><a href="https://docs.microsoft.com/en-us/dotnet/api/system.boolean">Boolean</a><br>
- True if the validation succeeds</p>
- <h3 id="llava_image_embed_make_with_bytessafellavamodelhandle-int32-byte-int32"><strong>llava_image_embed_make_with_bytes(SafeLlavaModelHandle, Int32, Byte[], Int32)</strong></h3>
- <p>Build an image embed from image file bytes</p>
- <pre><code class="language-csharp">public static SafeLlavaImageEmbedHandle llava_image_embed_make_with_bytes(SafeLlavaModelHandle ctx_clip, int n_threads, Byte[] image_bytes, int image_bytes_length)
- </code></pre>
- <h4 id="parameters_17">Parameters</h4>
- <p><code>ctx_clip</code> <a href="../llama.native.safellavamodelhandle/">SafeLlavaModelHandle</a><br>
- SafeHandle to the Clip Model</p>
- <p><code>n_threads</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.int32">Int32</a><br>
- Number of threads</p>
- <p><code>image_bytes</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.byte">Byte[]</a><br>
- Binary image in jpeg format</p>
- <p><code>image_bytes_length</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.int32">Int32</a><br>
- Byte length of the image</p>
- <h4 id="returns_13">Returns</h4>
- <p><a href="../llama.native.safellavaimageembedhandle/">SafeLlavaImageEmbedHandle</a><br>
- SafeHandle to the Embeddings</p>
- <h3 id="llava_image_embed_make_with_filenamesafellavamodelhandle-int32-string"><strong>llava_image_embed_make_with_filename(SafeLlavaModelHandle, Int32, String)</strong></h3>
- <p>Build an image embed from a path to an image filename</p>
- <pre><code class="language-csharp">public static SafeLlavaImageEmbedHandle llava_image_embed_make_with_filename(SafeLlavaModelHandle ctx_clip, int n_threads, string image_path)
- </code></pre>
- <h4 id="parameters_18">Parameters</h4>
- <p><code>ctx_clip</code> <a href="../llama.native.safellavamodelhandle/">SafeLlavaModelHandle</a><br>
- SafeHandle to the Clip Model</p>
- <p><code>n_threads</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.int32">Int32</a><br>
- Number of threads</p>
- <p><code>image_path</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.string">String</a><br>
- Path to the image file (JPEG) to generate embeddings from</p>
- <h4 id="returns_14">Returns</h4>
- <p><a href="../llama.native.safellavaimageembedhandle/">SafeLlavaImageEmbedHandle</a><br>
- SafeHandle to the embeddings</p>
- <h3 id="llava_image_embed_freeintptr"><strong>llava_image_embed_free(IntPtr)</strong></h3>
- <p>Free an embedding made with llava_image_embed_make_*</p>
- <pre><code class="language-csharp">public static void llava_image_embed_free(IntPtr embed)
- </code></pre>
- <h4 id="parameters_19">Parameters</h4>
- <p><code>embed</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.intptr">IntPtr</a><br>
- Embeddings to release</p>
- <h3 id="llava_eval_image_embedsafellamacontexthandle-safellavaimageembedhandle-int32-int32"><strong>llava_eval_image_embed(SafeLLamaContextHandle, SafeLlavaImageEmbedHandle, Int32, Int32&)</strong></h3>
- <p>Write the image represented by embed into the llama context with batch size n_batch, starting at context
- position n_past. On completion, n_past points to the next position in the context after the image embed.</p>
- <pre><code class="language-csharp">public static bool llava_eval_image_embed(SafeLLamaContextHandle ctx_llama, SafeLlavaImageEmbedHandle embed, int n_batch, Int32& n_past)
- </code></pre>
- <h4 id="parameters_20">Parameters</h4>
- <p><code>ctx_llama</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br>
- Llama Context</p>
- <p><code>embed</code> <a href="../llama.native.safellavaimageembedhandle/">SafeLlavaImageEmbedHandle</a><br>
- Embedding handle</p>
- <p><code>n_batch</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.int32">Int32</a><br></p>
- <p><code>n_past</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.int32&">Int32&</a><br></p>
- <h4 id="returns_15">Returns</h4>
- <p><a href="https://docs.microsoft.com/en-us/dotnet/api/system.boolean">Boolean</a><br>
- True on success</p>
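- <p>A rough end-to-end llava sketch using the calls above: validate the clip/llama pairing, build an embedding from an image file, then write it into the context. The handles, thread count, batch size and file name are illustrative assumptions.</p>
- <pre><code class="language-csharp">// Check that the clip projector and the llama model agree on embedding size.
- if (!NativeApi.llava_validate_embed_size(ctxLlama, clipModel))
-     throw new InvalidOperationException("clip and llama embedding sizes do not match");
- // Build the embedding from a JPEG file (4 worker threads; path is illustrative).
- SafeLlavaImageEmbedHandle embed = NativeApi.llava_image_embed_make_with_filename(clipModel, 4, "photo.jpg");
- // Write the image into the context with batch size 8; n_past is advanced past the image.
- int n_past = 0;
- if (!NativeApi.llava_eval_image_embed(ctxLlama, embed, 8, ref n_past))
-     throw new InvalidOperationException("failed to evaluate the image embedding");
- </code></pre>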
- <h3 id="llama_model_quantizestring-string-llamamodelquantizeparams"><strong>llama_model_quantize(String, String, LLamaModelQuantizeParams*)</strong></h3>
- <p>Returns 0 on success</p>
- <pre><code class="language-csharp">public static uint llama_model_quantize(string fname_inp, string fname_out, LLamaModelQuantizeParams* param)
- </code></pre>
- <h4 id="parameters_21">Parameters</h4>
- <p><code>fname_inp</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.string">String</a><br></p>
- <p><code>fname_out</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.string">String</a><br></p>
- <p><code>param</code> <a href="./llama.native.llamamodelquantizeparams*.md">LLamaModelQuantizeParams*</a><br></p>
- <h4 id="returns_16">Returns</h4>
- <p><a href="https://docs.microsoft.com/en-us/dotnet/api/system.uint32">UInt32</a><br>
- Returns 0 on success</p>
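- <p>A minimal quantization sketch, combining this call with <code>llama_model_quantize_default_params()</code> (documented further below); the file names are placeholders.</p>
- <pre><code class="language-csharp">// Use the default quantization parameters and write the quantized model to a new file.
- LLamaModelQuantizeParams qp = NativeApi.llama_model_quantize_default_params();
- unsafe
- {
-     uint result = NativeApi.llama_model_quantize("model-f16.gguf", "model-q4_k_m.gguf", &qp);
-     if (result != 0)
-         throw new InvalidOperationException("quantization failed");
- }
- </code></pre>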
- <h3 id="llama_sample_repetition_penaltiessafellamacontexthandle-llamatokendataarraynative-llamatoken-uint64-single-single-single"><strong>llama_sample_repetition_penalties(SafeLLamaContextHandle, LLamaTokenDataArrayNative&, LLamaToken*, UInt64, Single, Single, Single)</strong></h3>
- <p>Repetition penalty described in CTRL academic paper https://arxiv.org/abs/1909.05858, with negative logit fix.
- Frequency and presence penalties described in OpenAI API https://platform.openai.com/docs/api-reference/parameter-details.</p>
- <pre><code class="language-csharp">public static void llama_sample_repetition_penalties(SafeLLamaContextHandle ctx, LLamaTokenDataArrayNative& candidates, LLamaToken* last_tokens, ulong last_tokens_size, float penalty_repeat, float penalty_freq, float penalty_present)
- </code></pre>
- <h4 id="parameters_22">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <p><code>candidates</code> <a href="./llama.native.llamatokendataarraynative&.md">LLamaTokenDataArrayNative&</a><br>
- Pointer to LLamaTokenDataArray</p>
- <p><code>last_tokens</code> <a href="./llama.native.llamatoken*.md">LLamaToken*</a><br></p>
- <p><code>last_tokens_size</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.uint64">UInt64</a><br></p>
- <p><code>penalty_repeat</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.single">Single</a><br>
- Repetition penalty described in CTRL academic paper https://arxiv.org/abs/1909.05858, with negative logit fix.</p>
- <p><code>penalty_freq</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.single">Single</a><br>
- Frequency and presence penalties described in OpenAI API https://platform.openai.com/docs/api-reference/parameter-details.</p>
- <p><code>penalty_present</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.single">Single</a><br>
- Frequency and presence penalties described in OpenAI API https://platform.openai.com/docs/api-reference/parameter-details.</p>
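- <p>An illustrative (unsafe) call applying the three penalties over the most recently generated tokens. <code>ctx</code>, <code>candidates</code> and the <code>recentTokens</code> array are assumed to exist; the penalty values are common defaults, not recommendations from this reference.</p>
- <pre><code class="language-csharp">unsafe
- {
-     fixed (LLamaToken* lastTokens = recentTokens)   // recentTokens: LLamaToken[]
-     {
-         NativeApi.llama_sample_repetition_penalties(
-             ctx, ref candidates,
-             lastTokens, (ulong)recentTokens.Length,
-             penalty_repeat: 1.1f, penalty_freq: 0.0f, penalty_present: 0.0f);
-     }
- }
- </code></pre>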
- <h3 id="llama_sample_apply_guidancesafellamacontexthandle-spansingle-readonlyspansingle-single"><strong>llama_sample_apply_guidance(SafeLLamaContextHandle, Span<Single>, ReadOnlySpan<Single>, Single)</strong></h3>
- <p>Apply classifier-free guidance to the logits as described in academic paper "Stay on topic with Classifier-Free Guidance" https://arxiv.org/abs/2306.17806</p>
- <pre><code class="language-csharp">public static void llama_sample_apply_guidance(SafeLLamaContextHandle ctx, Span<float> logits, ReadOnlySpan<float> logits_guidance, float scale)
- </code></pre>
- <h4 id="parameters_23">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <p><code>logits</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.span-1">Span<Single></a><br>
- Logits extracted from the original generation context.</p>
- <p><code>logits_guidance</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.readonlyspan-1">ReadOnlySpan<Single></a><br>
- Logits extracted from a separate context from the same model.
- Other than a negative prompt at the beginning, it should have all generated and user input tokens copied from the main context.</p>
- <p><code>scale</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.single">Single</a><br>
- Guidance strength. 1.0f means no guidance. Higher values mean stronger guidance.</p>
- <h3 id="llama_sample_apply_guidancesafellamacontexthandle-single-single-single"><strong>llama_sample_apply_guidance(SafeLLamaContextHandle, Single<em>, Single</em>, Single)</strong></h3>
- <p>Apply classifier-free guidance to the logits as described in academic paper "Stay on topic with Classifier-Free Guidance" https://arxiv.org/abs/2306.17806</p>
- <pre><code class="language-csharp">public static void llama_sample_apply_guidance(SafeLLamaContextHandle ctx, Single* logits, Single* logits_guidance, float scale)
- </code></pre>
- <h4 id="parameters_24">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <p><code>logits</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.single*">Single*</a><br>
- Logits extracted from the original generation context.</p>
- <p><code>logits_guidance</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.single*">Single*</a><br>
- Logits extracted from a separate context from the same model.
- Other than a negative prompt at the beginning, it should have all generated and user input tokens copied from the main context.</p>
- <p><code>scale</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.single">Single</a><br>
- Guidance strength. 1.0f means no guidance. Higher values mean stronger guidance.</p>
- <h3 id="llama_sample_softmaxsafellamacontexthandle-llamatokendataarraynative"><strong>llama_sample_softmax(SafeLLamaContextHandle, LLamaTokenDataArrayNative&)</strong></h3>
- <p>Sorts candidate tokens by their logits in descending order and calculates probabilities based on the logits.</p>
- <pre><code class="language-csharp">public static void llama_sample_softmax(SafeLLamaContextHandle ctx, LLamaTokenDataArrayNative& candidates)
- </code></pre>
- <h4 id="parameters_25">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <p><code>candidates</code> <a href="./llama.native.llamatokendataarraynative&.md">LLamaTokenDataArrayNative&</a><br>
- Pointer to LLamaTokenDataArray</p>
- <h3 id="llama_sample_top_ksafellamacontexthandle-llamatokendataarraynative-int32-uint64"><strong>llama_sample_top_k(SafeLLamaContextHandle, LLamaTokenDataArrayNative&, Int32, UInt64)</strong></h3>
- <p>Top-K sampling described in academic paper "The Curious Case of Neural Text Degeneration" https://arxiv.org/abs/1904.09751</p>
- <pre><code class="language-csharp">public static void llama_sample_top_k(SafeLLamaContextHandle ctx, LLamaTokenDataArrayNative& candidates, int k, ulong min_keep)
- </code></pre>
- <h4 id="parameters_26">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <p><code>candidates</code> <a href="./llama.native.llamatokendataarraynative&.md">LLamaTokenDataArrayNative&</a><br>
- Pointer to LLamaTokenDataArray</p>
- <p><code>k</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.int32">Int32</a><br></p>
- <p><code>min_keep</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.uint64">UInt64</a><br></p>
- <h3 id="llama_sample_top_psafellamacontexthandle-llamatokendataarraynative-single-uint64"><strong>llama_sample_top_p(SafeLLamaContextHandle, LLamaTokenDataArrayNative&, Single, UInt64)</strong></h3>
- <p>Nucleus sampling described in academic paper "The Curious Case of Neural Text Degeneration" https://arxiv.org/abs/1904.09751</p>
- <pre><code class="language-csharp">public static void llama_sample_top_p(SafeLLamaContextHandle ctx, LLamaTokenDataArrayNative& candidates, float p, ulong min_keep)
- </code></pre>
- <h4 id="parameters_27">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <p><code>candidates</code> <a href="./llama.native.llamatokendataarraynative&.md">LLamaTokenDataArrayNative&</a><br>
- Pointer to LLamaTokenDataArray</p>
- <p><code>p</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.single">Single</a><br></p>
- <p><code>min_keep</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.uint64">UInt64</a><br></p>
- <h3 id="llama_sample_min_psafellamacontexthandle-llamatokendataarraynative-single-uint64"><strong>llama_sample_min_p(SafeLLamaContextHandle, LLamaTokenDataArrayNative&, Single, UInt64)</strong></h3>
- <p>Minimum P sampling as described in https://github.com/ggerganov/llama.cpp/pull/3841</p>
- <pre><code class="language-csharp">public static void llama_sample_min_p(SafeLLamaContextHandle ctx, LLamaTokenDataArrayNative& candidates, float p, ulong min_keep)
- </code></pre>
- <h4 id="parameters_28">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <p><code>candidates</code> <a href="./llama.native.llamatokendataarraynative&.md">LLamaTokenDataArrayNative&</a><br>
- Pointer to LLamaTokenDataArray</p>
- <p><code>p</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.single">Single</a><br></p>
- <p><code>min_keep</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.uint64">UInt64</a><br></p>
- <h3 id="llama_sample_tail_freesafellamacontexthandle-llamatokendataarraynative-single-uint64"><strong>llama_sample_tail_free(SafeLLamaContextHandle, LLamaTokenDataArrayNative&, Single, UInt64)</strong></h3>
- <p>Tail Free Sampling described in https://www.trentonbricken.com/Tail-Free-Sampling/.</p>
- <pre><code class="language-csharp">public static void llama_sample_tail_free(SafeLLamaContextHandle ctx, LLamaTokenDataArrayNative& candidates, float z, ulong min_keep)
- </code></pre>
- <h4 id="parameters_29">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <p><code>candidates</code> <a href="./llama.native.llamatokendataarraynative&.md">LLamaTokenDataArrayNative&</a><br>
- Pointer to LLamaTokenDataArray</p>
- <p><code>z</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.single">Single</a><br></p>
- <p><code>min_keep</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.uint64">UInt64</a><br></p>
- <h3 id="llama_sample_typicalsafellamacontexthandle-llamatokendataarraynative-single-uint64"><strong>llama_sample_typical(SafeLLamaContextHandle, LLamaTokenDataArrayNative&, Single, UInt64)</strong></h3>
- <p>Locally Typical Sampling implementation described in the paper https://arxiv.org/abs/2202.00666.</p>
- <pre><code class="language-csharp">public static void llama_sample_typical(SafeLLamaContextHandle ctx, LLamaTokenDataArrayNative& candidates, float p, ulong min_keep)
- </code></pre>
- <h4 id="parameters_30">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <p><code>candidates</code> <a href="./llama.native.llamatokendataarraynative&.md">LLamaTokenDataArrayNative&</a><br>
- Pointer to LLamaTokenDataArray</p>
- <p><code>p</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.single">Single</a><br></p>
- <p><code>min_keep</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.uint64">UInt64</a><br></p>
- <h3 id="llama_sample_typicalsafellamacontexthandle-llamatokendataarraynative-single-single-single"><strong>llama_sample_typical(SafeLLamaContextHandle, LLamaTokenDataArrayNative&, Single, Single, Single)</strong></h3>
- <p>Dynamic temperature implementation described in the paper https://arxiv.org/abs/2309.02772.</p>
- <pre><code class="language-csharp">public static void llama_sample_typical(SafeLLamaContextHandle ctx, LLamaTokenDataArrayNative& candidates, float min_temp, float max_temp, float exponent_val)
- </code></pre>
- <h4 id="parameters_31">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <p><code>candidates</code> <a href="./llama.native.llamatokendataarraynative&.md">LLamaTokenDataArrayNative&</a><br>
- Pointer to LLamaTokenDataArray</p>
- <p><code>min_temp</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.single">Single</a><br></p>
- <p><code>max_temp</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.single">Single</a><br></p>
- <p><code>exponent_val</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.single">Single</a><br></p>
- <h3 id="llama_sample_tempsafellamacontexthandle-llamatokendataarraynative-single"><strong>llama_sample_temp(SafeLLamaContextHandle, LLamaTokenDataArrayNative&, Single)</strong></h3>
- <p>Modify logits by temperature</p>
- <pre><code class="language-csharp">public static void llama_sample_temp(SafeLLamaContextHandle ctx, LLamaTokenDataArrayNative& candidates, float temp)
- </code></pre>
- <h4 id="parameters_32">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <p><code>candidates</code> <a href="./llama.native.llamatokendataarraynative&.md">LLamaTokenDataArrayNative&</a><br></p>
- <p><code>temp</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.single">Single</a><br></p>
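- <p>The filtering calls above are typically chained over the same candidate array before a final token is selected. A hedged sketch (the threshold values are illustrative; the final selection call is outside this excerpt):</p>
- <pre><code class="language-csharp">// Narrow the candidate set, then temper the remaining logits.
- NativeApi.llama_sample_top_k(ctx, ref candidates, 40, 1);
- NativeApi.llama_sample_top_p(ctx, ref candidates, 0.95f, 1);
- NativeApi.llama_sample_min_p(ctx, ref candidates, 0.05f, 1);
- NativeApi.llama_sample_temp(ctx, ref candidates, 0.8f);
- // ...followed by a final token-selection call (not shown in this section).
- </code></pre>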
- <h3 id="llama_get_embeddingssafellamacontexthandle"><strong>llama_get_embeddings(SafeLLamaContextHandle)</strong></h3>
- <p>Get the embeddings for the input</p>
- <pre><code class="language-csharp">public static Span<float> llama_get_embeddings(SafeLLamaContextHandle ctx)
- </code></pre>
- <h4 id="parameters_33">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <h4 id="returns_17">Returns</h4>
- <p><a href="https://docs.microsoft.com/en-us/dotnet/api/system.span-1">Span<Single></a><br></p>
- <h3 id="llama_chat_apply_templatesafellamamodelhandle-char-llamachatmessage-intptr-boolean-char-int32"><strong>llama_chat_apply_template(SafeLlamaModelHandle, Char<em>, LLamaChatMessage</em>, IntPtr, Boolean, Char*, Int32)</strong></h3>
- <p>Apply chat template. Inspired by hf apply_chat_template() in Python.
- Both "model" and "custom_template" are optional, but at least one is required. "custom_template" has higher precedence than "model".
- NOTE: This function does not use a jinja parser. It only supports a pre-defined list of templates. See more: https://github.com/ggerganov/llama.cpp/wiki/Templates-supported-by-llama_chat_apply_template</p>
- <pre><code class="language-csharp">public static int llama_chat_apply_template(SafeLlamaModelHandle model, Char* tmpl, LLamaChatMessage* chat, IntPtr n_msg, bool add_ass, Char* buf, int length)
- </code></pre>
- <h4 id="parameters_34">Parameters</h4>
- <p><code>model</code> <a href="../llama.native.safellamamodelhandle/">SafeLlamaModelHandle</a><br></p>
- <p><code>tmpl</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.char*">Char*</a><br>
- A Jinja template to use for this chat. If this is nullptr, the model’s default chat template will be used instead.</p>
- <p><code>chat</code> <a href="./llama.native.llamachatmessage*.md">LLamaChatMessage*</a><br>
- Pointer to a list of multiple llama_chat_message</p>
- <p><code>n_msg</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.intptr">IntPtr</a><br>
- Number of llama_chat_message in this chat</p>
- <p><code>add_ass</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.boolean">Boolean</a><br>
- Whether to end the prompt with the token(s) that indicate the start of an assistant message.</p>
- <p><code>buf</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.char*">Char*</a><br>
- A buffer to hold the output formatted prompt. The recommended alloc size is 2 * (total number of characters of all messages)</p>
- <p><code>length</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.int32">Int32</a><br>
- The size of the allocated buffer</p>
- <h4 id="returns_18">Returns</h4>
- <p><a href="https://docs.microsoft.com/en-us/dotnet/api/system.int32">Int32</a><br>
- The total number of bytes of the formatted prompt. If it is larger than the size of the buffer, you may need to re-allocate the buffer and then re-apply the template.</p>
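- <p>A hedged sketch of the grow-and-retry pattern implied by the return value. Marshalling the messages into a <code>LLamaChatMessage*</code> array is omitted; <code>chat</code> and <code>messageCount</code> are assumed to exist.</p>
- <pre><code class="language-csharp">unsafe
- {
-     // Pass null for tmpl to use the model's built-in chat template.
-     char* buf = stackalloc char[2048];
-     int needed = NativeApi.llama_chat_apply_template(model, null, chat, (IntPtr)messageCount, true, buf, 2048);
-     if (needed > 2048)
-     {
-         // Allocate a buffer of `needed` characters and apply the template again.
-     }
- }
- </code></pre>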
- <h3 id="llama_token_bossafellamamodelhandle"><strong>llama_token_bos(SafeLlamaModelHandle)</strong></h3>
- <p>Get the "Beginning of sentence" token</p>
- <pre><code class="language-csharp">public static LLamaToken llama_token_bos(SafeLlamaModelHandle model)
- </code></pre>
- <h4 id="parameters_35">Parameters</h4>
- <p><code>model</code> <a href="../llama.native.safellamamodelhandle/">SafeLlamaModelHandle</a><br></p>
- <h4 id="returns_19">Returns</h4>
- <p><a href="../llama.native.llamatoken/">LLamaToken</a><br></p>
- <h3 id="llama_token_eossafellamamodelhandle"><strong>llama_token_eos(SafeLlamaModelHandle)</strong></h3>
- <p>Get the "End of sentence" token</p>
- <pre><code class="language-csharp">public static LLamaToken llama_token_eos(SafeLlamaModelHandle model)
- </code></pre>
- <h4 id="parameters_36">Parameters</h4>
- <p><code>model</code> <a href="../llama.native.safellamamodelhandle/">SafeLlamaModelHandle</a><br></p>
- <h4 id="returns_20">Returns</h4>
- <p><a href="../llama.native.llamatoken/">LLamaToken</a><br></p>
- <h3 id="llama_token_nlsafellamamodelhandle"><strong>llama_token_nl(SafeLlamaModelHandle)</strong></h3>
- <p>Get the "new line" token</p>
- <pre><code class="language-csharp">public static LLamaToken llama_token_nl(SafeLlamaModelHandle model)
- </code></pre>
- <h4 id="parameters_37">Parameters</h4>
- <p><code>model</code> <a href="../llama.native.safellamamodelhandle/">SafeLlamaModelHandle</a><br></p>
- <h4 id="returns_21">Returns</h4>
- <p><a href="../llama.native.llamatoken/">LLamaToken</a><br></p>
- <h3 id="llama_add_bos_tokensafellamamodelhandle"><strong>llama_add_bos_token(SafeLlamaModelHandle)</strong></h3>
- <p>Returns -1 if unknown, 1 for true or 0 for false.</p>
- <pre><code class="language-csharp">public static int llama_add_bos_token(SafeLlamaModelHandle model)
- </code></pre>
- <h4 id="parameters_38">Parameters</h4>
- <p><code>model</code> <a href="../llama.native.safellamamodelhandle/">SafeLlamaModelHandle</a><br></p>
- <h4 id="returns_22">Returns</h4>
- <p><a href="https://docs.microsoft.com/en-us/dotnet/api/system.int32">Int32</a><br></p>
- <h3 id="llama_add_eos_tokensafellamamodelhandle"><strong>llama_add_eos_token(SafeLlamaModelHandle)</strong></h3>
- <p>Returns -1 if unknown, 1 for true or 0 for false.</p>
- <pre><code class="language-csharp">public static int llama_add_eos_token(SafeLlamaModelHandle model)
- </code></pre>
- <h4 id="parameters_39">Parameters</h4>
- <p><code>model</code> <a href="../llama.native.safellamamodelhandle/">SafeLlamaModelHandle</a><br></p>
- <h4 id="returns_23">Returns</h4>
- <p><a href="https://docs.microsoft.com/en-us/dotnet/api/system.int32">Int32</a><br></p>
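- <p>The return value of llama_add_bos_token / llama_add_eos_token is a tri-state rather than a plain boolean; a small sketch of one way to consume it (the fallback policy is the caller's choice, not prescribed here):</p>
- <pre><code class="language-csharp">// -1 = unknown, 1 = true, 0 = false.
- int addBos = NativeApi.llama_add_bos_token(model);
- bool shouldAddBos = addBos != 0;   // treat "unknown" (-1) the same as "true"
- </code></pre>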
- <h3 id="llama_token_prefixsafellamamodelhandle"><strong>llama_token_prefix(SafeLlamaModelHandle)</strong></h3>
- <p>CodeLlama infill tokens: beginning of infill prefix</p>
- <pre><code class="language-csharp">public static int llama_token_prefix(SafeLlamaModelHandle model)
- </code></pre>
- <h4 id="parameters_40">Parameters</h4>
- <p><code>model</code> <a href="../llama.native.safellamamodelhandle/">SafeLlamaModelHandle</a><br></p>
- <h4 id="returns_24">Returns</h4>
- <p><a href="https://docs.microsoft.com/en-us/dotnet/api/system.int32">Int32</a><br></p>
- <h3 id="llama_token_middlesafellamamodelhandle"><strong>llama_token_middle(SafeLlamaModelHandle)</strong></h3>
- <p>CodeLlama infill tokens: beginning of infill middle</p>
- <pre><code class="language-csharp">public static int llama_token_middle(SafeLlamaModelHandle model)
- </code></pre>
- <h4 id="parameters_41">Parameters</h4>
- <p><code>model</code> <a href="../llama.native.safellamamodelhandle/">SafeLlamaModelHandle</a><br></p>
- <h4 id="returns_25">Returns</h4>
- <p><a href="https://docs.microsoft.com/en-us/dotnet/api/system.int32">Int32</a><br></p>
- <h3 id="llama_token_suffixsafellamamodelhandle"><strong>llama_token_suffix(SafeLlamaModelHandle)</strong></h3>
- <p>CodeLlama infill tokens: beginning of infill suffix</p>
- <pre><code class="language-csharp">public static int llama_token_suffix(SafeLlamaModelHandle model)
- </code></pre>
- <h4 id="parameters_42">Parameters</h4>
- <p><code>model</code> <a href="../llama.native.safellamamodelhandle/">SafeLlamaModelHandle</a><br></p>
- <h4 id="returns_26">Returns</h4>
- <p><a href="https://docs.microsoft.com/en-us/dotnet/api/system.int32">Int32</a><br></p>
- <h3 id="llama_token_eotsafellamamodelhandle"><strong>llama_token_eot(SafeLlamaModelHandle)</strong></h3>
- <p>CodeLlama infill tokens: end of infill middle</p>
- <pre><code class="language-csharp">public static int llama_token_eot(SafeLlamaModelHandle model)
- </code></pre>
- <h4 id="parameters_43">Parameters</h4>
- <p><code>model</code> <a href="../llama.native.safellamamodelhandle/">SafeLlamaModelHandle</a><br></p>
- <h4 id="returns_27">Returns</h4>
- <p><a href="https://docs.microsoft.com/en-us/dotnet/api/system.int32">Int32</a><br></p>
- <h3 id="llama_print_timingssafellamacontexthandle"><strong>llama_print_timings(SafeLLamaContextHandle)</strong></h3>
- <p>Print out timing information for this context</p>
- <pre><code class="language-csharp">public static void llama_print_timings(SafeLLamaContextHandle ctx)
- </code></pre>
- <h4 id="parameters_44">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <h3 id="llama_reset_timingssafellamacontexthandle"><strong>llama_reset_timings(SafeLLamaContextHandle)</strong></h3>
- <p>Reset all collected timing information for this context</p>
- <pre><code class="language-csharp">public static void llama_reset_timings(SafeLLamaContextHandle ctx)
- </code></pre>
- <h4 id="parameters_45">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <h3 id="llama_print_system_info"><strong>llama_print_system_info()</strong></h3>
- <p>Print system information</p>
- <pre><code class="language-csharp">public static IntPtr llama_print_system_info()
- </code></pre>
- <h4 id="returns_28">Returns</h4>
- <p><a href="https://docs.microsoft.com/en-us/dotnet/api/system.intptr">IntPtr</a><br></p>
- <h3 id="llama_token_to_piecesafellamamodelhandle-llamatoken-spanbyte"><strong>llama_token_to_piece(SafeLlamaModelHandle, LLamaToken, Span<Byte>)</strong></h3>
- <p>Convert a single token into text</p>
- <pre><code class="language-csharp">public static int llama_token_to_piece(SafeLlamaModelHandle model, LLamaToken llamaToken, Span<byte> buffer)
- </code></pre>
- <h4 id="parameters_46">Parameters</h4>
- <p><code>model</code> <a href="../llama.native.safellamamodelhandle/">SafeLlamaModelHandle</a><br></p>
- <p><code>llamaToken</code> <a href="../llama.native.llamatoken/">LLamaToken</a><br></p>
- <p><code>buffer</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.span-1">Span<Byte></a><br>
- buffer to write string into</p>
- <h4 id="returns_29">Returns</h4>
- <p><a href="https://docs.microsoft.com/en-us/dotnet/api/system.int32">Int32</a><br>
- The length written, or if the buffer is too small a negative that indicates the length required</p>
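- <p>A short sketch of the negative-return convention: retry with a larger buffer when the first attempt reports the required length. <code>model</code> and <code>llamaToken</code> are assumed to exist.</p>
- <pre><code class="language-csharp">Span<byte> buffer = stackalloc byte[32];
- int written = NativeApi.llama_token_to_piece(model, llamaToken, buffer);
- if (written < 0)
- {
-     buffer = new byte[-written];   // grow to the reported size
-     written = NativeApi.llama_token_to_piece(model, llamaToken, buffer);
- }
- string piece = System.Text.Encoding.UTF8.GetString(buffer.Slice(0, written));
- </code></pre>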
- <h3 id="llama_tokenizesafellamamodelhandle-byte-int32-llamatoken-int32-boolean-boolean"><strong>llama_tokenize(SafeLlamaModelHandle, Byte<em>, Int32, LLamaToken</em>, Int32, Boolean, Boolean)</strong></h3>
- <p>Convert text into tokens</p>
- <pre><code class="language-csharp">public static int llama_tokenize(SafeLlamaModelHandle model, Byte* text, int text_len, LLamaToken* tokens, int n_max_tokens, bool add_bos, bool special)
- </code></pre>
- <h4 id="parameters_47">Parameters</h4>
- <p><code>model</code> <a href="../llama.native.safellamamodelhandle/">SafeLlamaModelHandle</a><br></p>
- <p><code>text</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.byte*">Byte*</a><br></p>
- <p><code>text_len</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.int32">Int32</a><br></p>
- <p><code>tokens</code> <a href="./llama.native.llamatoken*.md">LLamaToken*</a><br></p>
- <p><code>n_max_tokens</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.int32">Int32</a><br></p>
- <p><code>add_bos</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.boolean">Boolean</a><br></p>
- <p><code>special</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.boolean">Boolean</a><br>
- Allow tokenizing special and/or control tokens which otherwise are not exposed and treated as plaintext. Does not insert a leading space.</p>
- <h4 id="returns_30">Returns</h4>
- <p><a href="https://docs.microsoft.com/en-us/dotnet/api/system.int32">Int32</a><br>
- Returns the number of tokens on success, no more than n_max_tokens.
- Returns a negative number on failure - the number of tokens that would have been returned</p>
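- <p>An (unsafe) sketch of tokenizing a UTF-8 string with the same retry-on-negative convention; <code>model</code> is assumed to be a loaded SafeLlamaModelHandle and the input string is illustrative.</p>
- <pre><code class="language-csharp">byte[] text = System.Text.Encoding.UTF8.GetBytes("Hello, llama!");
- var tokens = new LLamaToken[64];
- unsafe
- {
-     fixed (byte* textPtr = text)
-     fixed (LLamaToken* tokenPtr = tokens)
-     {
-         int count = NativeApi.llama_tokenize(model, textPtr, text.Length, tokenPtr, tokens.Length, true, false);
-         if (count < 0)
-         {
-             // Allocate a buffer of -count tokens and call llama_tokenize again.
-         }
-     }
- }
- </code></pre>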
- <h3 id="llama_log_setllamalogcallback"><strong>llama_log_set(LLamaLogCallback)</strong></h3>
- <p>Register a callback to receive llama log messages</p>
- <pre><code class="language-csharp">public static void llama_log_set(LLamaLogCallback logCallback)
- </code></pre>
- <h4 id="parameters_48">Parameters</h4>
- <p><code>logCallback</code> <a href="./llama.native.llamalogcallback.md">LLamaLogCallback</a><br></p>
- <h3 id="llama_kv_cache_clearsafellamacontexthandle"><strong>llama_kv_cache_clear(SafeLLamaContextHandle)</strong></h3>
- <p>Clear the KV cache</p>
- <pre><code class="language-csharp">public static void llama_kv_cache_clear(SafeLLamaContextHandle ctx)
- </code></pre>
- <h4 id="parameters_49">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <h3 id="llama_kv_cache_seq_rmsafellamacontexthandle-llamaseqid-llamapos-llamapos"><strong>llama_kv_cache_seq_rm(SafeLLamaContextHandle, LLamaSeqId, LLamaPos, LLamaPos)</strong></h3>
- <p>Removes all tokens that belong to the specified sequence and have positions in [p0, p1)</p>
- <pre><code class="language-csharp">public static void llama_kv_cache_seq_rm(SafeLLamaContextHandle ctx, LLamaSeqId seq, LLamaPos p0, LLamaPos p1)
- </code></pre>
- <h4 id="parameters_50">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <p><code>seq</code> <a href="../llama.native.llamaseqid/">LLamaSeqId</a><br></p>
- <p><code>p0</code> <a href="../llama.native.llamapos/">LLamaPos</a><br></p>
- <p><code>p1</code> <a href="../llama.native.llamapos/">LLamaPos</a><br></p>
- <h3 id="llama_kv_cache_seq_cpsafellamacontexthandle-llamaseqid-llamaseqid-llamapos-llamapos"><strong>llama_kv_cache_seq_cp(SafeLLamaContextHandle, LLamaSeqId, LLamaSeqId, LLamaPos, LLamaPos)</strong></h3>
- <p>Copy all tokens that belong to the specified sequence to another sequence
- Note that this does not allocate extra KV cache memory - it simply assigns the tokens to the new sequence</p>
- <pre><code class="language-csharp">public static void llama_kv_cache_seq_cp(SafeLLamaContextHandle ctx, LLamaSeqId src, LLamaSeqId dest, LLamaPos p0, LLamaPos p1)
- </code></pre>
- <h4 id="parameters_51">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <p><code>src</code> <a href="../llama.native.llamaseqid/">LLamaSeqId</a><br></p>
- <p><code>dest</code> <a href="../llama.native.llamaseqid/">LLamaSeqId</a><br></p>
- <p><code>p0</code> <a href="../llama.native.llamapos/">LLamaPos</a><br></p>
- <p><code>p1</code> <a href="../llama.native.llamapos/">LLamaPos</a><br></p>
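- <p>A hedged sketch of reusing an already-evaluated prompt for a second sequence, assuming LLamaSeqId and LLamaPos can be constructed from integers and that a negative p1 means "to the end of the sequence" (as in llama.cpp):</p>
- <pre><code class="language-csharp">// Share the first promptLength positions of sequence 0 with sequence 1 (no extra KV memory).
- NativeApi.llama_kv_cache_seq_cp(ctx, (LLamaSeqId)0, (LLamaSeqId)1, (LLamaPos)0, (LLamaPos)promptLength);
- // Drop everything sequence 0 generated after the shared prompt.
- NativeApi.llama_kv_cache_seq_rm(ctx, (LLamaSeqId)0, (LLamaPos)promptLength, (LLamaPos)(-1));
- </code></pre>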
- <h3 id="llama_kv_cache_seq_keepsafellamacontexthandle-llamaseqid"><strong>llama_kv_cache_seq_keep(SafeLLamaContextHandle, LLamaSeqId)</strong></h3>
- <p>Removes all tokens that do not belong to the specified sequence</p>
- <pre><code class="language-csharp">public static void llama_kv_cache_seq_keep(SafeLLamaContextHandle ctx, LLamaSeqId seq)
- </code></pre>
- <h4 id="parameters_52">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <p><code>seq</code> <a href="../llama.native.llamaseqid/">LLamaSeqId</a><br></p>
- <h3 id="llama_kv_cache_seq_addsafellamacontexthandle-llamaseqid-llamapos-llamapos-int32"><strong>llama_kv_cache_seq_add(SafeLLamaContextHandle, LLamaSeqId, LLamaPos, LLamaPos, Int32)</strong></h3>
- <p>Adds relative position "delta" to all tokens that belong to the specified sequence and have positions in [p0, p1)
- If the KV cache is RoPEd, the KV data is updated accordingly:
- - lazily on next llama_decode()
- - explicitly with llama_kv_cache_update()</p>
- <pre><code class="language-csharp">public static void llama_kv_cache_seq_add(SafeLLamaContextHandle ctx, LLamaSeqId seq, LLamaPos p0, LLamaPos p1, int delta)
- </code></pre>
- <h4 id="parameters_53">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <p><code>seq</code> <a href="../llama.native.llamaseqid/">LLamaSeqId</a><br></p>
- <p><code>p0</code> <a href="../llama.native.llamapos/">LLamaPos</a><br></p>
- <p><code>p1</code> <a href="../llama.native.llamapos/">LLamaPos</a><br></p>
- <p><code>delta</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.int32">Int32</a><br></p>
- <h3 id="llama_kv_cache_seq_divsafellamacontexthandle-llamaseqid-llamapos-llamapos-int32"><strong>llama_kv_cache_seq_div(SafeLLamaContextHandle, LLamaSeqId, LLamaPos, LLamaPos, Int32)</strong></h3>
- <p>Integer division of the positions by a factor of <code>d &gt; 1</code>.
- If the KV cache is RoPEd, the KV data is updated accordingly:
- - lazily on next llama_decode()
- - explicitly with llama_kv_cache_update()
- <br>
- p0 &lt; 0 : [0, p1]
- <br>
- p1 &lt; 0 : [p0, inf)</p>
- <pre><code class="language-csharp">public static void llama_kv_cache_seq_div(SafeLLamaContextHandle ctx, LLamaSeqId seq, LLamaPos p0, LLamaPos p1, int d)
- </code></pre>
- <h4 id="parameters_54">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <p><code>seq</code> <a href="../llama.native.llamaseqid/">LLamaSeqId</a><br></p>
- <p><code>p0</code> <a href="../llama.native.llamapos/">LLamaPos</a><br></p>
- <p><code>p1</code> <a href="../llama.native.llamapos/">LLamaPos</a><br></p>
- <p><code>d</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.int32">Int32</a><br></p>
- <h3 id="llama_kv_cache_seq_pos_maxsafellamacontexthandle-llamaseqid"><strong>llama_kv_cache_seq_pos_max(SafeLLamaContextHandle, LLamaSeqId)</strong></h3>
- <p>Returns the largest position present in the KV cache for the specified sequence</p>
- <pre><code class="language-csharp">public static LLamaPos llama_kv_cache_seq_pos_max(SafeLLamaContextHandle ctx, LLamaSeqId seq)
- </code></pre>
- <h4 id="parameters_55">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <p><code>seq</code> <a href="../llama.native.llamaseqid/">LLamaSeqId</a><br></p>
- <h4 id="returns_31">Returns</h4>
- <p><a href="../llama.native.llamapos/">LLamaPos</a><br></p>
- <h3 id="llama_kv_cache_defragsafellamacontexthandle"><strong>llama_kv_cache_defrag(SafeLLamaContextHandle)</strong></h3>
- <p>Defragment the KV cache. This will be applied:
- - lazily on next llama_decode()
- - explicitly with llama_kv_cache_update()</p>
- <pre><code class="language-csharp">public static LLamaPos llama_kv_cache_defrag(SafeLLamaContextHandle ctx)
- </code></pre>
- <h4 id="parameters_56">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <h4 id="returns_32">Returns</h4>
- <p><a href="../llama.native.llamapos/">LLamaPos</a><br></p>
- <h3 id="llama_kv_cache_updatesafellamacontexthandle"><strong>llama_kv_cache_update(SafeLLamaContextHandle)</strong></h3>
- <p>Apply the KV cache updates (such as K-shifts, defragmentation, etc.)</p>
- <pre><code class="language-csharp">public static void llama_kv_cache_update(SafeLLamaContextHandle ctx)
- </code></pre>
- <h4 id="parameters_57">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <h3 id="llama_batch_initint32-int32-int32"><strong>llama_batch_init(Int32, Int32, Int32)</strong></h3>
- <p>Allocates a batch of tokens on the heap
- Each token can be assigned up to n_seq_max sequence ids
- The batch has to be freed with llama_batch_free()
- If embd != 0, llama_batch.embd will be allocated with size of n_tokens * embd * sizeof(float)
- Otherwise, llama_batch.token will be allocated to store n_tokens llama_token
- The rest of the llama_batch members are allocated with size n_tokens
- All members are left uninitialized</p>
- <pre><code class="language-csharp">public static LLamaNativeBatch llama_batch_init(int n_tokens, int embd, int n_seq_max)
- </code></pre>
- <h4 id="parameters_58">Parameters</h4>
- <p><code>n_tokens</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.int32">Int32</a><br></p>
- <p><code>embd</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.int32">Int32</a><br></p>
- <p><code>n_seq_max</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.int32">Int32</a><br>
- Each token can be assigned up to n_seq_max sequence ids</p>
- <h4 id="returns_33">Returns</h4>
- <p><a href="../llama.native.llamanativebatch/">LLamaNativeBatch</a><br></p>
- <h3 id="llama_batch_freellamanativebatch"><strong>llama_batch_free(LLamaNativeBatch)</strong></h3>
- <p>Frees a batch of tokens allocated with llama_batch_init()</p>
- <pre><code class="language-csharp">public static void llama_batch_free(LLamaNativeBatch batch)
- </code></pre>
- <h4 id="parameters_59">Parameters</h4>
- <p><code>batch</code> <a href="../llama.native.llamanativebatch/">LLamaNativeBatch</a><br></p>
- <h3 id="llama_decodesafellamacontexthandle-llamanativebatch"><strong>llama_decode(SafeLLamaContextHandle, LLamaNativeBatch)</strong></h3>
- <pre><code class="language-csharp">public static int llama_decode(SafeLLamaContextHandle ctx, LLamaNativeBatch batch)
- </code></pre>
- <h4 id="parameters_60">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <p><code>batch</code> <a href="../llama.native.llamanativebatch/">LLamaNativeBatch</a><br></p>
- <h4 id="returns_34">Returns</h4>
- <p><a href="https://docs.microsoft.com/en-us/dotnet/api/system.int32">Int32</a><br>
- Positive return values do not mean a fatal error, but rather a warning:<br>
- - 0: success<br>
- - 1: could not find a KV slot for the batch (try reducing the size of the batch or increasing the context size)<br>
- - &lt; 0: error<br></p>
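- <p>A sketch of the batch lifecycle around <code>llama_decode</code>, interpreting the return codes above (populating the batch members is omitted):</p>
- <pre><code class="language-csharp">LLamaNativeBatch batch = NativeApi.llama_batch_init(512, 0, 1);   // 512 tokens, no embeddings, 1 sequence id per token
- try
- {
-     // ... fill in the batch members (tokens, positions, sequence ids, logit flags) ...
-     int status = NativeApi.llama_decode(ctx, batch);
-     if (status == 1)
-     {
-         // No KV slot found: reduce the batch size or increase the context size.
-     }
-     else if (status < 0)
-     {
-         // Fatal error.
-     }
- }
- finally
- {
-     NativeApi.llama_batch_free(batch);
- }
- </code></pre>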
- <h3 id="llama_kv_cache_view_initsafellamacontexthandle-int32"><strong>llama_kv_cache_view_init(SafeLLamaContextHandle, Int32)</strong></h3>
- <p>Create an empty KV cache view. (use only for debugging purposes)</p>
- <pre><code class="language-csharp">public static LLamaKvCacheView llama_kv_cache_view_init(SafeLLamaContextHandle ctx, int n_max_seq)
- </code></pre>
- <h4 id="parameters_61">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <p><code>n_max_seq</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.int32">Int32</a><br></p>
- <h4 id="returns_35">Returns</h4>
- <p><a href="../llama.native.llamakvcacheview/">LLamaKvCacheView</a><br></p>
- <h3 id="llama_kv_cache_view_freellamakvcacheview"><strong>llama_kv_cache_view_free(LLamaKvCacheView&)</strong></h3>
- <p>Free a KV cache view. (use only for debugging purposes)</p>
- <pre><code class="language-csharp">public static void llama_kv_cache_view_free(LLamaKvCacheView& view)
- </code></pre>
- <h4 id="parameters_62">Parameters</h4>
- <p><code>view</code> <a href="./llama.native.llamakvcacheview&.md">LLamaKvCacheView&</a><br></p>
- <h3 id="llama_kv_cache_view_updatesafellamacontexthandle-llamakvcacheview"><strong>llama_kv_cache_view_update(SafeLLamaContextHandle, LLamaKvCacheView&)</strong></h3>
- <p>Update the KV cache view structure with the current state of the KV cache. (use only for debugging purposes)</p>
- <pre><code class="language-csharp">public static void llama_kv_cache_view_update(SafeLLamaContextHandle ctx, LLamaKvCacheView& view)
- </code></pre>
- <h4 id="parameters_63">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <p><code>view</code> <a href="./llama.native.llamakvcacheview&.md">LLamaKvCacheView&</a><br></p>
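- <p>A short debugging sketch tying the three view calls together:</p>
- <pre><code class="language-csharp">// Debug only: snapshot the KV cache layout, refresh it, then release it.
- LLamaKvCacheView view = NativeApi.llama_kv_cache_view_init(ctx, 1);
- NativeApi.llama_kv_cache_view_update(ctx, ref view);
- // ... inspect the view here ...
- NativeApi.llama_kv_cache_view_free(ref view);
- </code></pre>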
- <h3 id="llama_get_kv_cache_token_countsafellamacontexthandle"><strong>llama_get_kv_cache_token_count(SafeLLamaContextHandle)</strong></h3>
- <p>Returns the number of tokens in the KV cache (slow, use only for debug)
- If a KV cell has multiple sequences assigned to it, it will be counted multiple times</p>
- <pre><code class="language-csharp">public static int llama_get_kv_cache_token_count(SafeLLamaContextHandle ctx)
- </code></pre>
- <h4 id="parameters_64">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <h4 id="returns_36">Returns</h4>
- <p><a href="https://docs.microsoft.com/en-us/dotnet/api/system.int32">Int32</a><br></p>
- <h3 id="llama_get_kv_cache_used_cellssafellamacontexthandle"><strong>llama_get_kv_cache_used_cells(SafeLLamaContextHandle)</strong></h3>
- <p>Returns the number of used KV cells (i.e. have at least one sequence assigned to them)</p>
- <pre><code class="language-csharp">public static int llama_get_kv_cache_used_cells(SafeLLamaContextHandle ctx)
- </code></pre>
- <h4 id="parameters_65">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <h4 id="returns_37">Returns</h4>
- <p><a href="https://docs.microsoft.com/en-us/dotnet/api/system.int32">Int32</a><br></p>
- <h3 id="llama_beam_searchsafellamacontexthandle-llamabeamsearchcallback-intptr-uint64-int32-int32-int32"><strong>llama_beam_search(SafeLLamaContextHandle, LLamaBeamSearchCallback, IntPtr, UInt64, Int32, Int32, Int32)</strong></h3>
- <p>Deterministically returns the entire sentence constructed by a beam search.</p>
- <pre><code class="language-csharp">public static void llama_beam_search(SafeLLamaContextHandle ctx, LLamaBeamSearchCallback callback, IntPtr callback_data, ulong n_beams, int n_past, int n_predict, int n_threads)
- </code></pre>
- <h4 id="parameters_66">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br>
- Pointer to the llama_context.</p>
- <p><code>callback</code> <a href="./llama.native.nativeapi.llamabeamsearchcallback.md">LLamaBeamSearchCallback</a><br>
- Invoked for each iteration of the beam_search loop, passing in beams_state.</p>
- <p><code>callback_data</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.intptr">IntPtr</a><br>
- A pointer that is simply passed back to callback.</p>
- <p><code>n_beams</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.uint64">UInt64</a><br>
- Number of beams to use.</p>
- <p><code>n_past</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.int32">Int32</a><br>
- Number of tokens already evaluated.</p>
- <p><code>n_predict</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.int32">Int32</a><br>
- Maximum number of tokens to predict. EOS may occur earlier.</p>
- <p><code>n_threads</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.int32">Int32</a><br>
- Number of threads.</p>
- <h3 id="llama_empty_call"><strong>llama_empty_call()</strong></h3>
- <p>A method that does nothing. This is a native method; calling it will force the llama native dependencies to be loaded.</p>
- <pre><code class="language-csharp">public static void llama_empty_call()
- </code></pre>
- <h3 id="llama_max_devices"><strong>llama_max_devices()</strong></h3>
- <p>Get the maximum number of devices supported by llama.cpp</p>
- <pre><code class="language-csharp">public static long llama_max_devices()
- </code></pre>
- <h4 id="returns_38">Returns</h4>
- <p><a href="https://docs.microsoft.com/en-us/dotnet/api/system.int64">Int64</a><br></p>
- <h3 id="llama_model_default_params"><strong>llama_model_default_params()</strong></h3>
- <p>Create a LLamaModelParams with default values</p>
- <pre><code class="language-csharp">public static LLamaModelParams llama_model_default_params()
- </code></pre>
- <h4 id="returns_39">Returns</h4>
- <p><a href="../llama.native.llamamodelparams/">LLamaModelParams</a><br></p>
- <h3 id="llama_context_default_params"><strong>llama_context_default_params()</strong></h3>
- <p>Create a LLamaContextParams with default values</p>
- <pre><code class="language-csharp">public static LLamaContextParams llama_context_default_params()
- </code></pre>
- <h4 id="returns_40">Returns</h4>
- <p><a href="../llama.native.llamacontextparams/">LLamaContextParams</a><br></p>
- <h3 id="llama_model_quantize_default_params"><strong>llama_model_quantize_default_params()</strong></h3>
- <p>Create a LLamaModelQuantizeParams with default values</p>
- <pre><code class="language-csharp">public static LLamaModelQuantizeParams llama_model_quantize_default_params()
- </code></pre>
- <h4 id="returns_41">Returns</h4>
- <p><a href="../llama.native.llamamodelquantizeparams/">LLamaModelQuantizeParams</a><br></p>
- <h3 id="llama_supports_mmap"><strong>llama_supports_mmap()</strong></h3>
- <p>Check if memory mapping is supported</p>
- <pre><code class="language-csharp">public static bool llama_supports_mmap()
- </code></pre>
- <h4 id="returns_42">Returns</h4>
- <p><a href="https://docs.microsoft.com/en-us/dotnet/api/system.boolean">Boolean</a><br></p>
- <h3 id="llama_supports_mlock"><strong>llama_supports_mlock()</strong></h3>
- <p>Check if memory locking is supported</p>
- <pre><code class="language-csharp">public static bool llama_supports_mlock()
- </code></pre>
- <h4 id="returns_43">Returns</h4>
- <p><a href="https://docs.microsoft.com/en-us/dotnet/api/system.boolean">Boolean</a><br></p>
- <h3 id="llama_supports_gpu_offload"><strong>llama_supports_gpu_offload()</strong></h3>
- <p>Check if GPU offload is supported</p>
- <pre><code class="language-csharp">public static bool llama_supports_gpu_offload()
- </code></pre>
- <h4 id="returns_44">Returns</h4>
- <p><a href="https://docs.microsoft.com/en-us/dotnet/api/system.boolean">Boolean</a><br></p>
- <h3 id="llama_set_rng_seedsafellamacontexthandle-uint32"><strong>llama_set_rng_seed(SafeLLamaContextHandle, UInt32)</strong></h3>
- <p>Sets the current rng seed.</p>
- <pre><code class="language-csharp">public static void llama_set_rng_seed(SafeLLamaContextHandle ctx, uint seed)
- </code></pre>
- <h4 id="parameters_67">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <p><code>seed</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.uint32">UInt32</a><br></p>
- <h3 id="llama_get_state_sizesafellamacontexthandle"><strong>llama_get_state_size(SafeLLamaContextHandle)</strong></h3>
- <p>Returns the maximum size in bytes of the state (rng, logits, embedding
- and kv_cache) - will often be smaller after compacting tokens</p>
- <pre><code class="language-csharp">public static ulong llama_get_state_size(SafeLLamaContextHandle ctx)
- </code></pre>
- <h4 id="parameters_68">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <h4 id="returns_45">Returns</h4>
- <p><a href="https://docs.microsoft.com/en-us/dotnet/api/system.uint64">UInt64</a><br></p>
- <h3 id="llama_copy_state_datasafellamacontexthandle-byte"><strong>llama_copy_state_data(SafeLLamaContextHandle, Byte*)</strong></h3>
- <p>Copies the state to the specified destination address.
- Destination needs to have allocated enough memory.</p>
- <pre><code class="language-csharp">public static ulong llama_copy_state_data(SafeLLamaContextHandle ctx, Byte* dest)
- </code></pre>
- <h4 id="parameters_69">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <p><code>dest</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.byte*">Byte*</a><br></p>
- <h4 id="returns_46">Returns</h4>
- <p><a href="https://docs.microsoft.com/en-us/dotnet/api/system.uint64">UInt64</a><br>
- the number of bytes copied</p>
- <h3 id="llama_set_state_datasafellamacontexthandle-byte"><strong>llama_set_state_data(SafeLLamaContextHandle, Byte*)</strong></h3>
- <p>Set the state reading from the specified address</p>
- <pre><code class="language-csharp">public static ulong llama_set_state_data(SafeLLamaContextHandle ctx, Byte* src)
- </code></pre>
- <h4 id="parameters_70">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <p><code>src</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.byte*">Byte*</a><br></p>
- <h4 id="returns_47">Returns</h4>
- <p><a href="https://docs.microsoft.com/en-us/dotnet/api/system.uint64">UInt64</a><br>
- the number of bytes read</p>
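- <p>A hedged sketch of a state round-trip built from the three calls above; restoring into a context with a different configuration is not covered by this reference.</p>
- <pre><code class="language-csharp">ulong size = NativeApi.llama_get_state_size(ctx);
- byte[] state = new byte[size];
- unsafe
- {
-     fixed (byte* ptr = state)
-     {
-         ulong written = NativeApi.llama_copy_state_data(ctx, ptr);   // snapshot
-         // ... later, on the same (or identically configured) context ...
-         ulong read = NativeApi.llama_set_state_data(ctx, ptr);       // restore
-     }
- }
- </code></pre>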
- <h3 id="llama_load_session_filesafellamacontexthandle-string-llamatoken-uint64-uint64"><strong>llama_load_session_file(SafeLLamaContextHandle, String, LLamaToken[], UInt64, UInt64&)</strong></h3>
- <p>Load session file</p>
- <pre><code class="language-csharp">public static bool llama_load_session_file(SafeLLamaContextHandle ctx, string path_session, LLamaToken[] tokens_out, ulong n_token_capacity, UInt64& n_token_count_out)
- </code></pre>
- <h4 id="parameters_71">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <p><code>path_session</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.string">String</a><br></p>
- <p><code>tokens_out</code> <a href="../llama.native.llamatoken/">LLamaToken[]</a><br></p>
- <p><code>n_token_capacity</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.uint64">UInt64</a><br></p>
- <p><code>n_token_count_out</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.uint64&">UInt64&</a><br></p>
- <h4 id="returns_48">Returns</h4>
- <p><a href="https://docs.microsoft.com/en-us/dotnet/api/system.boolean">Boolean</a><br></p>
- <h3 id="llama_save_session_filesafellamacontexthandle-string-llamatoken-uint64"><strong>llama_save_session_file(SafeLLamaContextHandle, String, LLamaToken[], UInt64)</strong></h3>
- <p>Save session file</p>
- <pre><code class="language-csharp">public static bool llama_save_session_file(SafeLLamaContextHandle ctx, string path_session, LLamaToken[] tokens, ulong n_token_count)
- </code></pre>
- <h4 id="parameters_72">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <p><code>path_session</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.string">String</a><br></p>
- <p><code>tokens</code> <a href="../llama.native.llamatoken/">LLamaToken[]</a><br></p>
- <p><code>n_token_count</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.uint64">UInt64</a><br></p>
- <h4 id="returns_49">Returns</h4>
- <p><a href="https://docs.microsoft.com/en-us/dotnet/api/system.boolean">Boolean</a><br></p>
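- <p>A small sketch pairing the save and load calls; <code>evaluatedTokens</code>, the file name and the capacity of the reload buffer are illustrative assumptions.</p>
- <pre><code class="language-csharp">// Persist the tokens already fed to the context.
- NativeApi.llama_save_session_file(ctx, "session.bin", evaluatedTokens, (ulong)evaluatedTokens.Length);
- // Later: reload them into a buffer and read back how many are valid.
- var restored = new LLamaToken[4096];
- ulong restoredCount = 0;
- if (NativeApi.llama_load_session_file(ctx, "session.bin", restored, (ulong)restored.Length, ref restoredCount))
- {
-     // The first `restoredCount` entries of `restored` are valid.
- }
- </code></pre>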
- <h3 id="llama_token_get_textsafellamamodelhandle-llamatoken"><strong>llama_token_get_text(SafeLlamaModelHandle, LLamaToken)</strong></h3>
- <pre><code class="language-csharp">public static Byte* llama_token_get_text(SafeLlamaModelHandle model, LLamaToken token)
- </code></pre>
- <h4 id="parameters_73">Parameters</h4>
- <p><code>model</code> <a href="../llama.native.safellamamodelhandle/">SafeLlamaModelHandle</a><br></p>
- <p><code>token</code> <a href="../llama.native.llamatoken/">LLamaToken</a><br></p>
- <h4 id="returns_50">Returns</h4>
- <p><a href="https://docs.microsoft.com/en-us/dotnet/api/system.byte*">Byte*</a><br></p>
- <h3 id="llama_token_get_scoresafellamamodelhandle-llamatoken"><strong>llama_token_get_score(SafeLlamaModelHandle, LLamaToken)</strong></h3>
- <pre><code class="language-csharp">public static float llama_token_get_score(SafeLlamaModelHandle model, LLamaToken token)
- </code></pre>
- <h4 id="parameters_74">Parameters</h4>
- <p><code>model</code> <a href="../llama.native.safellamamodelhandle/">SafeLlamaModelHandle</a><br></p>
- <p><code>token</code> <a href="../llama.native.llamatoken/">LLamaToken</a><br></p>
- <h4 id="returns_51">Returns</h4>
- <p><a href="https://docs.microsoft.com/en-us/dotnet/api/system.single">Single</a><br></p>
- <h3 id="llama_token_get_typesafellamamodelhandle-llamatoken"><strong>llama_token_get_type(SafeLlamaModelHandle, LLamaToken)</strong></h3>
- <pre><code class="language-csharp">public static LLamaTokenType llama_token_get_type(SafeLlamaModelHandle model, LLamaToken token)
- </code></pre>
- <h4 id="parameters_75">Parameters</h4>
- <p><code>model</code> <a href="../llama.native.safellamamodelhandle/">SafeLlamaModelHandle</a><br></p>
- <p><code>token</code> <a href="../llama.native.llamatoken/">LLamaToken</a><br></p>
- <h4 id="returns_52">Returns</h4>
- <p><a href="../llama.native.llamatokentype/">LLamaTokenType</a><br></p>
- <h3 id="llama_n_ctxsafellamacontexthandle"><strong>llama_n_ctx(SafeLLamaContextHandle)</strong></h3>
- <p>Get the size of the context window for the model for this context</p>
- <pre><code class="language-csharp">public static uint llama_n_ctx(SafeLLamaContextHandle ctx)
- </code></pre>
- <h4 id="parameters_76">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <h4 id="returns_53">Returns</h4>
- <p><a href="https://docs.microsoft.com/en-us/dotnet/api/system.uint32">UInt32</a><br></p>
- <h3 id="llama_n_batchsafellamacontexthandle"><strong>llama_n_batch(SafeLLamaContextHandle)</strong></h3>
- <p>Get the batch size for this context</p>
- <pre><code class="language-csharp">public static uint llama_n_batch(SafeLLamaContextHandle ctx)
- </code></pre>
- <h4 id="parameters_77">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <h4 id="returns_54">Returns</h4>
- <p><a href="https://docs.microsoft.com/en-us/dotnet/api/system.uint32">UInt32</a><br></p>
- <h3 id="llama_get_logitssafellamacontexthandle"><strong>llama_get_logits(SafeLLamaContextHandle)</strong></h3>
- <p>Token logits obtained from the last call to llama_decode.
- The logits for the last token are stored in the last row and can be
- mutated in order to change the probabilities of the next token.<br>
- Rows: n_tokens<br>
- Cols: n_vocab</p>
- <pre><code class="language-csharp">public static Single* llama_get_logits(SafeLLamaContextHandle ctx)
- </code></pre>
- <h4 id="parameters_78">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <h4 id="returns_55">Returns</h4>
- <p><a href="https://docs.microsoft.com/en-us/dotnet/api/system.single*">Single*</a><br></p>
- <h3 id="llama_get_logits_ithsafellamacontexthandle-int32"><strong>llama_get_logits_ith(SafeLLamaContextHandle, Int32)</strong></h3>
- <p>Logits for the ith token. Equivalent to: llama_get_logits(ctx) + i*n_vocab</p>
- <pre><code class="language-csharp">public static Single* llama_get_logits_ith(SafeLLamaContextHandle ctx, int i)
- </code></pre>
- <h4 id="parameters_79">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <p><code>i</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.int32">Int32</a><br></p>
- <h4 id="returns_56">Returns</h4>
- <p><a href="https://docs.microsoft.com/en-us/dotnet/api/system.single*">Single*</a><br></p>
- <h3 id="llama_get_embeddings_ithsafellamacontexthandle-int32"><strong>llama_get_embeddings_ith(SafeLLamaContextHandle, Int32)</strong></h3>
- <p>Get the embeddings for the ith sequence. Equivalent to: llama_get_embeddings(ctx) + i*n_embd</p>
- <pre><code class="language-csharp">public static Single* llama_get_embeddings_ith(SafeLLamaContextHandle ctx, int i)
- </code></pre>
- <h4 id="parameters_80">Parameters</h4>
- <p><code>ctx</code> <a href="../llama.native.safellamacontexthandle/">SafeLLamaContextHandle</a><br></p>
- <p><code>i</code> <a href="https://docs.microsoft.com/en-us/dotnet/api/system.int32">Int32</a><br></p>
- <h4 id="returns_57">Returns</h4>
- <p><a href="https://docs.microsoft.com/en-us/dotnet/api/system.single*">Single*</a><br></p>
-
-
-
-
-
-
- </article>
- </div>
-
-
- </div>
-
- </main>
-
- <footer class="md-footer">
-
- <div class="md-footer-meta md-typeset">
- <div class="md-footer-meta__inner md-grid">
- <div class="md-copyright">
-
-
- Made with
- <a href="https://squidfunk.github.io/mkdocs-material/" target="_blank" rel="noopener">
- Material for MkDocs
- </a>
-
- </div>
-
- </div>
- </div>
- </footer>
-
- </div>
- <div class="md-dialog" data-md-component="dialog">
- <div class="md-dialog__inner md-typeset"></div>
- </div>
-
- <script id="__config" type="application/json">{"base": "../..", "features": [], "search": "../../assets/javascripts/workers/search.74e28a9f.min.js", "translations": {"clipboard.copied": "Copied to clipboard", "clipboard.copy": "Copy to clipboard", "search.result.more.one": "1 more on this page", "search.result.more.other": "# more on this page", "search.result.none": "No matching documents", "search.result.one": "1 matching document", "search.result.other": "# matching documents", "search.result.placeholder": "Type to start searching", "search.result.term.missing": "Missing", "select.version": "Select version"}, "version": {"provider": "mike"}}</script>
-
-
- <script src="../../assets/javascripts/bundle.220ee61c.min.js"></script>
-
-
- </body>
- </html>