xiajingze/llvm (forked from src-openEuler/llvm)
Note: this repository does not declare an open-source license file (LICENSE); check the project description and its upstream code dependencies before use.
0031-ACPO-ACPO-Infrastructure.patch (239.90 KB)
From 9773a0bfd29580c31867afccada947457617628e Mon Sep 17 00:00:00 2001
From: tsczajkowski <[email protected]>
Date: Thu, 22 Aug 2024 23:56:11 -0400
Subject: [PATCH] [ACPO] ACPO Infrastructure
This change introduces the ACPO ML infrastructure, which enables the use of ML
models within the LLVM compiler through a simple interface.
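As a rough usage sketch (not part of the patch itself): a client pass is expected to
subclass ACPOModel, declared in llvm/include/llvm/Analysis/ACPOModel.h below, provide
a heuristic fallback in getAdviceNoML(), and call getAdvice(). The class name
MyInlineModel and the result field name "ShouldInline" are placeholders chosen for
illustration; only the ACPOModel/ACPOAdvice/ACPOMLInterface calls themselves come
from this patch.

    // Hypothetical subclass; names are illustrative only.
    #include "llvm/Analysis/ACPOModel.h"
    using namespace llvm;

    class MyInlineModel : public ACPOModel {
    public:
      MyInlineModel(OptimizationRemarkEmitter *ORE, LLVMContext &Ctx)
          : ACPOModel(ORE, /*UseML=*/true) {
        setContextPtr(&Ctx);
        setMLIF(createPersistentCompiledMLIF());
        Type::TypeID BoolID = Type::IntegerTyID;
        addRequiredResultField("ShouldInline", BoolID);
      }

    protected:
      // Heuristic fallback used when the ML path is unavailable.
      std::unique_ptr<ACPOAdvice> getAdviceNoML() override {
        auto Advice = std::make_unique<ACPOAdvice>();
        Advice->addField("ShouldInline",
                         ConstantInt::get(Type::getInt1Ty(*getContextPtr()), 0));
        return Advice;
      }
    };

A pass would then construct the model and consume the decision through
getAdvice()->getField("ShouldInline").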
---
ACPO_README.md | 36 +
llvm/CMakeLists.txt | 4 +
.../llvm/Analysis/ACPOCollectFeatures.h | 296 ++++
llvm/include/llvm/Analysis/ACPOMLInterface.h | 482 ++++++
llvm/include/llvm/Analysis/ACPOModel.h | 122 ++
llvm/include/llvm/Analysis/ACPOModelRunner.h | 39 +
llvm/include/llvm/Analysis/AOTModelRunner.h | 203 +++
llvm/include/llvm/Analysis/CallHeight.h | 72 +
llvm/include/llvm/Analysis/DumpCallsite.h | 27 +
llvm/include/llvm/Analysis/DumpFeature.h | 194 +++
llvm/include/llvm/Analysis/LoopInfo.h | 11 +
.../llvm/Analysis/ModelDataCollector.h | 108 ++
llvm/include/llvm/InitializePasses.h | 8 +
llvm/lib/Analysis/ACPOCollectFeatures.cpp | 1258 +++++++++++++++
llvm/lib/Analysis/ACPOMLInterface.cpp | 1405 +++++++++++++++++
llvm/lib/Analysis/ACPOModel.cpp | 63 +
llvm/lib/Analysis/CMakeLists.txt | 32 +
llvm/lib/Analysis/CallHeight.cpp | 89 ++
llvm/lib/Analysis/DumpCallsite.cpp | 82 +
llvm/lib/Analysis/DumpFeature.cpp | 575 +++++++
llvm/lib/Analysis/ModelDataCollector.cpp | 350 ++++
llvm/lib/CodeGen/CMakeLists.txt | 2 +-
llvm/lib/IR/AsmWriter.cpp | 220 ++-
llvm/lib/Passes/PassBuilder.cpp | 6 +
llvm/lib/Passes/PassBuilderPipelines.cpp | 14 +
llvm/lib/Passes/PassRegistry.def | 10 +
26 files changed, 5679 insertions(+), 29 deletions(-)
create mode 100644 ACPO_README.md
create mode 100644 llvm/include/llvm/Analysis/ACPOCollectFeatures.h
create mode 100644 llvm/include/llvm/Analysis/ACPOMLInterface.h
create mode 100644 llvm/include/llvm/Analysis/ACPOModel.h
create mode 100644 llvm/include/llvm/Analysis/ACPOModelRunner.h
create mode 100644 llvm/include/llvm/Analysis/AOTModelRunner.h
create mode 100644 llvm/include/llvm/Analysis/CallHeight.h
create mode 100644 llvm/include/llvm/Analysis/DumpCallsite.h
create mode 100644 llvm/include/llvm/Analysis/DumpFeature.h
create mode 100644 llvm/include/llvm/Analysis/ModelDataCollector.h
create mode 100644 llvm/lib/Analysis/ACPOCollectFeatures.cpp
create mode 100644 llvm/lib/Analysis/ACPOMLInterface.cpp
create mode 100644 llvm/lib/Analysis/ACPOModel.cpp
create mode 100644 llvm/lib/Analysis/CallHeight.cpp
create mode 100644 llvm/lib/Analysis/DumpCallsite.cpp
create mode 100644 llvm/lib/Analysis/DumpFeature.cpp
create mode 100644 llvm/lib/Analysis/ModelDataCollector.cpp
diff --git a/llvm/CMakeLists.txt b/llvm/CMakeLists.txt
index 79de9eb2e3e7..b0afb47a7243 100644
--- a/llvm/CMakeLists.txt
+++ b/llvm/CMakeLists.txt
@@ -1001,6 +1001,9 @@ endif()
#
set(TENSORFLOW_AOT_PATH "" CACHE PATH "Path to TensorFlow pip install dir")
+set(LLVM_INLINER_MODEL_PATH "" CACHE PATH "Path to the inliner model")
+set(ACPO_AOT OFF CACHE BOOL "Whether or not ACPO AOT is enabled")
+
if (NOT TENSORFLOW_AOT_PATH STREQUAL "")
set(LLVM_HAVE_TF_AOT "ON" CACHE BOOL "Tensorflow AOT available")
set(TENSORFLOW_AOT_COMPILER
@@ -1009,6 +1012,7 @@ if (NOT TENSORFLOW_AOT_PATH STREQUAL "")
include_directories(${TENSORFLOW_AOT_PATH}/include)
add_subdirectory(${TENSORFLOW_AOT_PATH}/xla_aot_runtime_src
${CMAKE_ARCHIVE_OUTPUT_DIRECTORY}/tf_runtime)
+ target_compile_definitions(tf_xla_runtime_objects PUBLIC EIGEN_NEON_GEBP_NR=4) # Fix for issue https://github.com/tensorflow/tensorflow/issues/58481
install(TARGETS tf_xla_runtime EXPORT LLVMExports
ARCHIVE DESTINATION lib${LLVM_LIBDIR_SUFFIX} COMPONENT tf_xla_runtime)
set_property(GLOBAL APPEND PROPERTY LLVM_EXPORTS tf_xla_runtime)
diff --git a/llvm/include/llvm/Analysis/ACPOCollectFeatures.h b/llvm/include/llvm/Analysis/ACPOCollectFeatures.h
new file mode 100644
index 000000000000..ec62b559542d
--- /dev/null
+++ b/llvm/include/llvm/Analysis/ACPOCollectFeatures.h
@@ -0,0 +1,296 @@
+//===- ACPOCollectFeatures.h - ACPO Class for Feature Collection ----------===//
+//
+// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+// See https://llvm.org/LICENSE.txt for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+//
+//===----------------------------------------------------------------------===//
+//
+// This header file defines the type, scope, and number of features to be
+// collected on a given ACPOModel class from all available features.
+//
+//===----------------------------------------------------------------------===//
+#ifndef LLVM_ANALYSIS_ACPOCOLLECTFEATURES_H
+#define LLVM_ANALYSIS_ACPOCOLLECTFEATURES_H
+#include "llvm/Analysis/InlineAdvisor.h"
+#include "llvm/Analysis/LoopInfo.h"
+#include "llvm/IR/Function.h"
+#include "llvm/IR/Instructions.h"
+#include "llvm/IR/PassManager.h"
+
+#include <ios>
+#include <memory>
+#include <ostream>
+#include <set>
+#include <sstream>
+#include <stdio.h>
+#include <stdlib.h>
+#include <unordered_map>
+#include <utility>
+#include <vector>
+
+namespace llvm {
+class ACPOCollectFeatures {
+public:
+ // A feature is associated with one of the following scopes.
+ enum class Scope {
+ Module,
+ Function,
+ Loop,
+ Callgraph,
+ CallSite,
+ NumOfScope,
+ };
+
+ // In the future, as more features are added, features can be calculated
+ // simultaneously.
+ // Suppose features A and B could be calculated in the same loop;
+ // then it would make sense to calculate both features at the same time
+ // and save them in a cache system
+ // (which could be implemented similarly to DumpFeature.h/cpp).
+ enum class GroupID {
+ EdgeNodeCount,
+ FPIRelated,
+ HotColdCallSite,
+ InlineCostFeatureGroup,
+ ACPOFIExtendedFeatures,
+ NumOfGroupID
+ };
+
+ // List of features we support calculating.
+ // (1) Each feature should have a corresponding scope on which it
+ // depends for its calculation.
+ // (2) A feature may belong to a group whose features can be
+ // calculated together.
+ // (3) Once you decide to add a feature, you should register it in all the
+ // static maps in the .cpp file, except for special indicator enums
+ // like InlineCostFeatureGroupBegin/End.
+ enum class FeatureIndex {
+ // Begin: InlineCostFeatureGroup
+ InlineCostFeatureGroupBegin,
+ SROASavings,
+ SROALosses,
+ LoadElimination,
+ CallPenalty,
+ CallArgumentSetup,
+ LoadRelativeIntrinsic,
+ LoweredCallArgSetup,
+ IndirectCallPenalty,
+ JumpTablePenalty,
+ CaseClusterPenalty,
+ SwitchPenalty,
+ UnsimplifiedCommonInstructions,
+ NumLoops,
+ DeadBlocks,
+ SimplifiedInstructions,
+ ConstantArgs,
+ ConstantOffsetPtrArgs,
+ CallSiteCost,
+ ColdCcPenalty,
+ LastCallToStaticBonus,
+ IsMultipleBlocks,
+ NestedInlines,
+ NestedInlineCostEstimate,
+ Threshold,
+ InlineCostFeatureGroupEnd,
+ // End: InlineCostFeatureGroup
+
+ // Begin: FPIRelated
+ BasicBlockCount,
+ BlocksReachedFromConditionalInstruction,
+ Uses,
+ // End: FPIRelated
+
+ // Begin: EdgeNodeCount
+ EdgeCount,
+ NodeCount,
+ // End: EdgeNodeCount
+
+ // Begin: HotColdCallsite
+ ColdCallSite,
+ HotCallSite,
+ // End: HotColdCallsite
+
+ // Begin: ACPOFIExtendedFeatures
+ ACPOFIExtendedFeaturesNamedFeatureBegin,
+ ACPOFIExtendedFeaturesInitialSize,
+ ACPOFIExtendedFeaturesBlocks,
+ ACPOFIExtendedFeaturesCalls,
+ ACPOFIExtendedFeaturesIsLocal,
+ ACPOFIExtendedFeaturesIsLinkOnceODR,
+ ACPOFIExtendedFeaturesIsLinkOnce,
+ ACPOFIExtendedFeaturesLoops,
+ ACPOFIExtendedFeaturesMaxLoopDepth,
+ ACPOFIExtendedFeaturesMaxDomTreeLevel,
+ ACPOFIExtendedFeaturesPtrArgs,
+ ACPOFIExtendedFeaturesPtrCallee,
+ ACPOFIExtendedFeaturesCallReturnPtr,
+ ACPOFIExtendedFeaturesConditionalBranch,
+ ACPOFIExtendedFeaturesCBwithArg,
+ ACPOFIExtendedFeaturesCallerHeight,
+ ACPOFIExtendedFeaturesCallUsage,
+ ACPOFIExtendedFeaturesIsRecursive,
+ ACPOFIExtendedFeaturesNumCallsiteInLoop,
+ ACPOFIExtendedFeaturesNumOfCallUsesInLoop,
+ ACPOFIExtendedFeaturesEntryBlockFreq,
+ ACPOFIExtendedFeaturesMaxCallsiteBlockFreq,
+ ACPOFIExtendedFeaturesNamedFeatureEnd,
+ ACPOFIExtendedFeaturesFloatFeatureBegin,
+ ACPOFIExtendedFeaturesInstructionPerBlock,
+ ACPOFIExtendedFeaturesSuccessorPerBlock,
+ ACPOFIExtendedFeaturesAvgVecInstr,
+ ACPOFIExtendedFeaturesAvgNestedLoopLevel,
+ ACPOFIExtendedFeaturesInstrPerLoop,
+ ACPOFIExtendedFeaturesBlockWithMultipleSuccecorsPerLoop,
+ ACPOFIExtendedFeaturesFloatFeatureEnd,
+ // End: ACPOFIExtendedFeatures
+
+ CallerBlockFreq,
+ CallSiteHeight,
+ ConstantParam,
+ CostEstimate,
+ LoopLevel,
+ MandatoryKind,
+ MandatoryOnly,
+ OptCode,
+ IsIndirectCall,
+ IsInInnerLoop,
+ IsMustTailCall,
+ IsTailCall,
+ NumOfFeatures
+ };
+
+ struct AnalysisManagers {
+ FunctionAnalysisManager *FAM = nullptr;
+ ModuleAnalysisManager *MAM = nullptr;
+ };
+
+ // ScopeInfo is a struct that contains the information needed to
+ // calculate the corresponding feature.
+ struct ScopeInfo {
+ Function *F = nullptr;
+ CallBase *CB = nullptr;
+ BasicBlock *BB = nullptr;
+ Module *M = nullptr;
+ Loop *L = nullptr;
+ // Can add Instructions or other types later.
+ };
+
+ struct OtherInfo {
+ bool MandatoryOnly = false;
+ InlineAdvisor *IA = nullptr;
+ };
+
+ // FeatureInfo should contain all the relevant information to calculate
+ // the corresponding FeatureIndex.
+ struct FeatureInfo {
+ // When Idx == NumOfFeatures, we assume this is a global FeatureInfo.
+ FeatureIndex Idx;
+ // Once we have the Idx, we should know the following two attributes:
+ // Scope ScopeIdx //
+ // GroupID Group //
+ AnalysisManagers Managers;
+ ScopeInfo SI;
+ OtherInfo OI;
+ };
+
+ using FeatureValueMap = std::unordered_map<FeatureIndex, std::string>;
+ using FeatureInfoMap = std::unordered_map<FeatureIndex, FeatureInfo>;
+ using FeaturesInfo = std::vector<FeatureInfo>;
+ using Scopes = std::vector<Scope>;
+ using GroupIDs = std::vector<GroupID>;
+ typedef void (*CalculateFeatureFunction)(ACPOCollectFeatures &,
+ const FeatureInfo &);
+
+ // Constructors/Destructors
+ ACPOCollectFeatures();
+ ACPOCollectFeatures(FeatureInfo GlobalInfo);
+ ~ACPOCollectFeatures();
+
+ // Setters/getters
+ void setFeatureValue(FeatureIndex Idx, std::string Val);
+
+ void setFeatureInfo(FeatureIndex Idx, FeatureInfo Info);
+
+ void setFeatureValueAndInfo(FeatureIndex Idx, FeatureInfo Info,
+ std::string Val);
+
+ void setGlobalFeatureInfo(FeatureInfo &Info);
+
+ std::string getFeature(FeatureIndex Idx) const;
+
+ // Check if the feature is already calculated.
+ bool containsFeature(FeatureIndex);
+ bool containsFeature(GroupID);
+
+ static std::string getFeatureName(FeatureIndex Idx);
+ static GroupID getFeatureGroup(FeatureIndex Idx);
+ static Scope getFeatureScope(FeatureIndex Idx);
+ static std::set<FeatureIndex> getGroupFeatures(GroupID Group);
+ static std::set<FeatureIndex> getScopeFeatures(Scope S);
+
+ void clearFeatureValueMap();
+ bool registeredFeature(FeatureIndex Idx) const;
+
+ // Calculate and Return the feature values specified by FeaturesInfo
+ FeatureValueMap getFeaturesPair(FeaturesInfo Features);
+
+ // Calculate and Return the feature values specified from [Beg, End)
+ // TODO: Make a similar method for Scopes and GroupIDs
+ FeatureValueMap getFeaturesPair(FeatureIndex Beg, FeatureIndex End);
+
+ // Calculate and Return the feature values specified by Scope.
+ FeatureValueMap getFeaturesPair(Scopes);
+
+ // Calculate and Return the feature values specified by GroupID.
+ FeatureValueMap getFeaturesPair(GroupIDs);
+
+ static InlineAdvisor::MandatoryInliningKind
+ getMandatoryKind(CallBase &CB, FunctionAnalysisManager &FAM,
+ OptimizationRemarkEmitter &ORE);
+
+ static void clearFunctionLevel();
+ static void insertFunctionLevel(const Function *, unsigned);
+ static std::optional<unsigned> getFunctionLevel(const Function *);
+
+private:
+ // Global mappings.
+ // FeatureIndexToName and FeatureIndexToScope should be a one-to-one mapping.
+ static const std::unordered_map<FeatureIndex, std::string> FeatureIndexToName;
+ static const std::unordered_map<FeatureIndex, Scope> FeatureIndexToScope;
+ static const std::unordered_map<FeatureIndex, GroupID> FeatureIndexToGroup;
+ static const std::multimap<GroupID, FeatureIndex> GroupToFeatureIndices;
+ static const std::multimap<Scope, FeatureIndex> ScopeToFeatureIndices;
+ // The CalculateFeatureMap maps each feature to a corresponding function that
+ // calculates the feature and also sets the feature value inside the
+ // FeatureToValue field.
+ static const std::unordered_map<FeatureIndex, CalculateFeatureFunction>
+ CalculateFeatureMap;
+
+ // TODO:
+ // Implement the cache system here. See a similar example in DumpFeature.cpp.
+ // Notice we've only cached the FunctionLevels,
+ // but in the future this should be generalized to all features.
+ // One way to do this is to define a map from FeatureIndex -> Mapping.
+ // Inside this mapping, the key should be the Scope and the set of analyses it
+ // depends on.
+
+ static std::map<const Function *, unsigned> FunctionLevels;
+
+ // Saved FeatureValues when we collect the features.
+ FeatureValueMap FeatureToValue;
+ FeatureInfoMap FeatureToInfo;
+ FeatureInfo GlobalFeatureInfo;
+};
+
+ACPOCollectFeatures::FeatureIndex operator+(ACPOCollectFeatures::FeatureIndex,
+ int);
+ACPOCollectFeatures::FeatureIndex operator-(ACPOCollectFeatures::FeatureIndex,
+ int);
+ACPOCollectFeatures::FeatureIndex &
+operator++(ACPOCollectFeatures::FeatureIndex &);
+ACPOCollectFeatures::FeatureIndex
+operator++(ACPOCollectFeatures::FeatureIndex &, int);
+
+} // namespace llvm
+#endif // LLVM_ANALYSIS_ACPOCOLLECTFEATURES_H
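To make the interface above concrete, here is a minimal, hypothetical sketch of
collecting all Function-scope features for a function. The wrapper function
collectFunctionFeatures and the choice to print the values are illustrative
assumptions; the FeatureInfo layout, the getFeaturesPair(Scopes) overload, and
getFeatureName() are the declarations from this header.

    #include "llvm/Analysis/ACPOCollectFeatures.h"
    #include "llvm/Support/raw_ostream.h"
    using namespace llvm;

    static void collectFunctionFeatures(Function &F, FunctionAnalysisManager &FAM,
                                        ModuleAnalysisManager &MAM) {
      // Idx == NumOfFeatures marks this FeatureInfo as the global default.
      ACPOCollectFeatures::FeatureInfo Global;
      Global.Idx = ACPOCollectFeatures::FeatureIndex::NumOfFeatures;
      Global.Managers = {&FAM, &MAM};
      Global.SI.F = &F;
      Global.SI.M = F.getParent();

      ACPOCollectFeatures Collector(Global);
      // Compute every feature registered under the Function scope.
      ACPOCollectFeatures::FeatureValueMap Values = Collector.getFeaturesPair(
          ACPOCollectFeatures::Scopes{ACPOCollectFeatures::Scope::Function});

      for (const auto &KV : Values)
        errs() << ACPOCollectFeatures::getFeatureName(KV.first) << " = "
               << KV.second << "\n";
    }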
diff --git a/llvm/include/llvm/Analysis/ACPOMLInterface.h b/llvm/include/llvm/Analysis/ACPOMLInterface.h
new file mode 100644
index 000000000000..996f27ee32ba
--- /dev/null
+++ b/llvm/include/llvm/Analysis/ACPOMLInterface.h
@@ -0,0 +1,482 @@
+//===- ACPOMLInterface.h - AI-Enabled Continuous Program Optimization -----===//
+//
+// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+// See https://llvm.org/LICENSE.txt for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+//
+// Copyright (C) 2021-2022. Huawei Technologies Co., Ltd. All rights reserved.
+//
+//===----------------------------------------------------------------------===//
+
+#ifndef LLVM_ANALYSIS_ACPOML_INTERFACE_H
+#define LLVM_ANALYSIS_ACPOML_INTERFACE_H
+
+#include "llvm/Analysis/ACPOModelRunner.h"
+#include "llvm/IR/Constants.h"
+#include "llvm/IR/LLVMContext.h"
+#include "llvm/Support/Program.h"
+
+#include <cstddef>
+#include <ios>
+#include <memory>
+#include <sstream>
+#include <stdio.h>
+#include <stdlib.h>
+#include <unordered_map>
+#include <utility>
+#include <vector>
+
+namespace llvm {
+
+class ACPOModelRunner;
+
+// This is a class for storing information about a model.
+class Model {
+public:
+ // Constructors
+ Model() : NumFeatures{1}, NumOutputs{1} {}
+ Model(std::size_t NumFeatures) : NumFeatures{NumFeatures}, NumOutputs{1} {}
+ Model(std::size_t NumFeatures, int NumOutputs)
+ : NumFeatures{NumFeatures}, NumOutputs{NumOutputs} {}
+
+ // Getters/Setters
+ std::size_t getNumFeatures() const { return NumFeatures; }
+ void setNumFeatures(int NumFeatures) { this->NumFeatures = NumFeatures; }
+
+ int getNumOutputs() const { return NumOutputs; }
+ void setNumOutputs(int NumOutputs) { this->NumOutputs = NumOutputs; }
+
+ std::string getSignature() const { return Signature; }
+ void setSignature(std::string Signature) { this->Signature = Signature; }
+
+ // Register a feature into the NameToID and IDToIndex maps.
+ bool registerFeature(std::string FeatureName, uint64_t FeatureID, int Index);
+
+ // Register an input into the map.
+ bool registerInput(std::string InputName, std::string InputType);
+
+ // Register an output into the map.
+ bool registerOutput(std::string OutputName, std::string OutputType);
+
+ // Return the index of a feature within the feature list used by inference().
+ int getIndex(uint64_t FeatureID) const;
+ int getIndex(std::string FeatureName) const;
+
+ // Return the name of a feature within the feature list used by inference().
+ std::string getName(uint64_t FeatureID) const;
+
+ // Return true if output name exists.
+ bool checkOutputExists(std::string OutputName) const;
+
+ // Return the type of an input given its name.
+ std::string getInputType(std::string InputName) const;
+
+ // Return the type of an output given its name.
+ std::string getOutputType(std::string OutputName) const;
+
+private:
+ std::size_t NumFeatures;
+ int NumOutputs;
+ std::string Signature;
+ std::unordered_map<std::string, uint64_t> NameToID;
+ std::unordered_map<uint64_t, std::string> IDToName;
+ std::unordered_map<uint64_t, int> IDToIndex;
+
+ // A map from input name to input type
+ std::unordered_map<std::string, std::string> InputMap;
+
+ // A map from output name to output type
+ std::unordered_map<std::string, std::string> OutputMap;
+};
+
+// This is the base class to define an interface with an ML framework.
+class ACPOMLInterface {
+public:
+ // Constructor/Destructor.
+ ACPOMLInterface() {}
+ virtual ~ACPOMLInterface() {}
+
+ // Getters/Setters
+ bool isInitialized() const { return Initialized; }
+ void setInitialized(bool Val) { Initialized = Val; }
+
+ // Interface methods.
+ // Return the next available ID for a feature.
+ virtual uint64_t assignID() = 0;
+
+ // Load a model by reading from the specified file.
+ // Return false if the operation failed.
+ virtual bool loadModel(std::string ModelSpecFile) = 0;
+
+ // Insert a new model into the model map.
+ virtual bool registerModel(std::string ModelName, int NumFeatures) = 0;
+ virtual bool registerModel(std::string ModelName, int NumFeatures,
+ int NumOutputs) = 0;
+
+ // Register a new feature for a given model.
+ virtual bool registerFeature(std::string ModelName, std::string FeatureName,
+ int Index) = 0;
+
+ // Register a new output for a given model.
+ virtual bool registerOutput(std::string ModelName, std::string OutputName,
+ std::string OutputType) = 0;
+
+ // Return the number of models currently live in ML framework memory.
+ virtual int getNumLoadedModels() = 0;
+
+ // Specify the input file to use as IR to be passed to the model (however
+ // it is processed afterwards). Return false if the operation failed.
+ virtual bool defineInputIR(std::string Filename) = 0;
+
+ // Specify a custom feature for a model to use as input at the next model
+ // invocation. Return false if the operation failed.
+ virtual bool setCustomFeature(std::string ModelName, uint64_t FeatureID,
+ int FeatureValue) = 0;
+ virtual bool setCustomFeature(std::string ModelName, uint64_t FeatureID,
+ int64_t FeatureValue) = 0;
+ virtual bool setCustomFeature(std::string ModelName, uint64_t FeatureID,
+ double FeatureValue) = 0;
+ virtual bool setCustomFeature(std::string ModelName, uint64_t FeatureID,
+ float FeatureValue) = 0;
+ virtual bool setCustomFeature(std::string ModelName, uint64_t FeatureID,
+ bool FeatureValue) = 0;
+ virtual bool setCustomFeature(std::string ModelName, std::string FeatureName,
+ int FeatureValue) = 0;
+ virtual bool setCustomFeature(std::string ModelName, std::string FeatureName,
+ int64_t FeatureValue) = 0;
+ virtual bool setCustomFeature(std::string ModelName, std::string FeatureName,
+ double FeatureValue) = 0;
+ virtual bool setCustomFeature(std::string ModelName, std::string FeatureName,
+ float FeatureValue) = 0;
+ virtual bool setCustomFeature(std::string ModelName, std::string FeatureName,
+ bool FeatureValue) = 0;
+
+ // Replace all features with the given feature values.
+ // Activate the specified model.
+ virtual bool initializeFeatures(
+ std::string ModelName,
+ const std::vector<std::pair<uint64_t, std::string>> &FeatureValues) = 0;
+
+ virtual bool
+ initializeFeatures(std::string ModelName,
+ const std::vector<std::pair<std::string, std::string>>
+ &FeatureValues) = 0;
+
+ // Set features with the specified feature values.
+ // Not changing the currently active model.
+ virtual bool setCustomFeatures(
+ std::string ModelName,
+ const std::vector<std::pair<uint64_t, std::string>> &FeatureValues) = 0;
+
+ virtual bool
+ setCustomFeatures(std::string ModelName,
+ const std::vector<std::pair<std::string, std::string>>
+ &FeatureValues) = 0;
+
+ // Run a model with the specified name. Return false if the execution was not
+ // possible or an error was encountered.
+ virtual bool runModel(std::string ModelName) = 0;
+
+ // Return the type of an output within the model specified by the name.
+ virtual std::string getOutputType(std::string ModelName,
+ std::string OutputName) = 0;
+
+ // Return model results, based on the output name.
+ virtual int getModelResultI(std::string OutputName) = 0;
+ virtual int64_t getModelResultI64(std::string OutputName) = 0;
+ virtual float getModelResultF(std::string OutputName) = 0;
+ virtual double getModelResultD(std::string OutputName) = 0;
+ virtual bool getModelResultB(std::string OutputName) = 0;
+
+ // Get status of the ML interface. Return zero if succeeded.
+ virtual int getStatus() = 0;
+
+ // Free up memory taken by a model.
+ virtual bool releaseModel(std::string ModelName) = 0;
+
+ // Close interface when done. Return false if the command was not successful.
+ // In some cases this just requires a constructor for this class to be called,
+ // but in others, additional steps that require feedback may be needed.
+ virtual bool closeMLInterface() = 0;
+
+ // Set a flag to invoke closeMLInterface when the instance of the class is
+ // destroyed.
+ void setCloseOnDestruction() { CloseOnDestruction = true; }
+
+protected:
+ bool CloseOnDestruction = false;
+
+private:
+ bool Initialized = false;
+};
+
+class ACPOMLPythonInterface : public ACPOMLInterface {
+public:
+ ACPOMLPythonInterface();
+ virtual ~ACPOMLPythonInterface();
+
+ // Interface methods.
+ // Return the next available ID for a feature.
+ virtual uint64_t assignID() override;
+
+ // Load a model by reading from the specified file.
+ // Return false if the operation failed.
+ virtual bool loadModel(std::string ModelSpecFile) override;
+
+ // Insert a new model into the model map.
+ virtual bool registerModel(std::string ModelName, int NumFeatures) override;
+ virtual bool registerModel(std::string ModelName, int NumFeatures,
+ int NumOutputs) override;
+
+ // Register a new feature for a given model.
+ virtual bool registerFeature(std::string ModelName, std::string FeatureName,
+ int Index) override;
+
+ // Register a new output for a given model.
+ virtual bool registerOutput(std::string ModelName, std::string OutputName,
+ std::string OutputType) override;
+
+ // Return the number of models currently live in ML framework memory.
+ virtual int getNumLoadedModels() override;
+
+ // Specify the input file to use as IR to be passed to the model (however
+ // it is processed afterwards). Return false if the operation failed.
+ virtual bool defineInputIR(std::string Filename) override;
+
+ // Specify a custom feature for a model to use as input at the next model
+ // invocation. Return false if the operation failed.
+ virtual bool setCustomFeature(std::string ModelName, uint64_t FeatureID,
+ int FeatureValue) override;
+ virtual bool setCustomFeature(std::string ModelName, uint64_t FeatureID,
+ int64_t FeatureValue) override;
+ virtual bool setCustomFeature(std::string ModelName, uint64_t FeatureID,
+ double FeatureValue) override;
+ virtual bool setCustomFeature(std::string ModelName, uint64_t FeatureID,
+ float FeatureValue) override;
+ virtual bool setCustomFeature(std::string ModelName, uint64_t FeatureID,
+ bool FeatureValue) override;
+
+ virtual bool setCustomFeature(std::string ModelName, std::string FeatureName,
+ int FeatureValue) override;
+ virtual bool setCustomFeature(std::string ModelName, std::string FeatureName,
+ int64_t FeatureValue) override;
+ virtual bool setCustomFeature(std::string ModelName, std::string FeatureName,
+ double FeatureValue) override;
+ virtual bool setCustomFeature(std::string ModelName, std::string FeatureName,
+ float FeatureValue) override;
+ virtual bool setCustomFeature(std::string ModelName, std::string FeatureName,
+ bool FeatureValue) override;
+
+ // Replace all features with the given feature values.
+ // Activate the specified model.
+ virtual bool
+ initializeFeatures(std::string ModelName,
+ const std::vector<std::pair<uint64_t, std::string>>
+ &FeatureValues) override;
+
+ virtual bool
+ initializeFeatures(std::string ModelName,
+ const std::vector<std::pair<std::string, std::string>>
+ &FeatureValues) override;
+
+ // Set features with the specified feature values.
+ // Not changing the currently active model.
+ virtual bool
+ setCustomFeatures(std::string ModelName,
+ const std::vector<std::pair<uint64_t, std::string>>
+ &FeatureValues) override;
+
+ virtual bool
+ setCustomFeatures(std::string ModelName,
+ const std::vector<std::pair<std::string, std::string>>
+ &FeatureValues) override;
+
+ // Run a model with the specified name. Return false if the execution was not
+ // possible or an error was encountered.
+ virtual bool runModel(std::string ModelName) override;
+
+ // Return the type of an output within the model specified by the name.
+ virtual std::string getOutputType(std::string ModelName,
+ std::string OutputName) override;
+
+ // Return model results, based on the output name.
+ virtual int getModelResultI(std::string OutputName) override;
+ virtual int64_t getModelResultI64(std::string OutputName) override;
+ virtual float getModelResultF(std::string OutputName) override;
+ virtual double getModelResultD(std::string OutputName) override;
+ virtual bool getModelResultB(std::string OutputName) override;
+
+ // Get status of the ML interface. Return zero if succeeded.
+ virtual int getStatus() override;
+
+ // Free up memory taken by a model.
+ virtual bool releaseModel(std::string ModelName) override;
+
+ // Close interface when done. Return false if the command was not successful.
+ // In some cases this just requires a constructor for this class to be called,
+ // but in others, additional steps that require feedback may be needed.
+ virtual bool closeMLInterface() override;
+
+protected:
+ void sendCommand(const std::string &Command);
+ void sendCommand(const std::vector<std::string> &Features);
+ std::string getResponse();
+ std::vector<std::string> tokenize(const std::string &Line);
+
+private:
+ llvm::sys::ProcessInfo SubProcess;
+ FILE *PipeOut = nullptr;
+ FILE *PipeIn = nullptr;
+
+ uint64_t NextID;
+
+ std::string CurrentlyActiveModel;
+
+ // Mapping model names to their corresponding Model
+ std::unordered_map<std::string, std::shared_ptr<Model>> ModelMap;
+};
+
+std::shared_ptr<ACPOMLInterface> createPersistentPythonMLIF();
+
+class ACPOMLCPPInterface : public ACPOMLInterface {
+public:
+ ACPOMLCPPInterface();
+ virtual ~ACPOMLCPPInterface();
+
+ // Interface methods.
+ // Return the next available ID for a feature.
+ virtual uint64_t assignID() override;
+
+ // Load a model by reading from the specified file.
+ // Return false if the operation failed.
+ // For ACPOMLCompiledInterface, loadCompiledModel() should be used instead.
+ virtual bool loadModel(std::string ModelSpecFile) override;
+
+ // Insert a new model into the model map.
+ virtual bool registerModel(std::string ModelName, int NumFeatures) override;
+ virtual bool registerModel(std::string ModelName, int NumFeatures,
+ int NumOutputs) override;
+
+ // Register a new feature for a given model.
+ virtual bool registerFeature(std::string ModelName, std::string FeatureName,
+ int Index) override;
+
+ // Register a new output for a given model.
+ virtual bool registerOutput(std::string ModelName, std::string OutputName,
+ std::string OutputType) override;
+
+ // Return the number of models currently live in ML framework memory.
+ virtual int getNumLoadedModels() override;
+
+ // Specify the input file to use as IR to be passed to the model (however
+ // it is processed afterwards). Return false if the operation failed.
+ virtual bool defineInputIR(std::string Filename) override;
+
+ // Specify a custom feature for a model to use as input at the next model
+ // invocation. Return false if the operation failed.
+ virtual bool setCustomFeature(std::string ModelName, uint64_t FeatureID,
+ int FeatureValue) override;
+ virtual bool setCustomFeature(std::string ModelName, uint64_t FeatureID,
+ int64_t FeatureValue) override;
+ virtual bool setCustomFeature(std::string ModelName, uint64_t FeatureID,
+ double FeatureValue) override;
+ virtual bool setCustomFeature(std::string ModelName, uint64_t FeatureID,
+ float FeatureValue) override;
+ virtual bool setCustomFeature(std::string ModelName, uint64_t FeatureID,
+ bool FeatureValue) override;
+
+ virtual bool setCustomFeature(std::string ModelName, std::string FeatureName,
+ int FeatureValue) override;
+ virtual bool setCustomFeature(std::string ModelName, std::string FeatureName,
+ int64_t FeatureValue) override;
+ virtual bool setCustomFeature(std::string ModelName, std::string FeatureName,
+ double FeatureValue) override;
+ virtual bool setCustomFeature(std::string ModelName, std::string FeatureName,
+ float FeatureValue) override;
+ virtual bool setCustomFeature(std::string ModelName, std::string FeatureName,
+ bool FeatureValue) override;
+
+ // Replace all features with the given feature values.
+ // Activate the specified model.
+ virtual bool
+ initializeFeatures(std::string ModelName,
+ const std::vector<std::pair<uint64_t, std::string>>
+ &FeatureValues) override;
+
+ virtual bool
+ initializeFeatures(std::string ModelName,
+ const std::vector<std::pair<std::string, std::string>>
+ &FeatureValues) override;
+
+ // Set features with the specified feature values.
+ // Not changing the currently active model.
+ virtual bool
+ setCustomFeatures(std::string ModelName,
+ const std::vector<std::pair<uint64_t, std::string>>
+ &FeatureValues) override;
+
+ virtual bool
+ setCustomFeatures(std::string ModelName,
+ const std::vector<std::pair<std::string, std::string>>
+ &FeatureValues) override;
+
+ // Run a model with the specified name. Return false if the execution was not
+ // possible or an error was encountered.
+ virtual bool runModel(std::string ModelName) override;
+
+ // Return the type of an input within the model specified by the name.
+ virtual std::string getInputType(std::string ModelName,
+ std::string InputName);
+
+ // Return the type of an output within the model specified by the name.
+ virtual std::string getOutputType(std::string ModelName,
+ std::string OutputName) override;
+
+ // Return model results, based on the output name.
+ virtual int getModelResultI(std::string OutputName) override;
+ virtual int64_t getModelResultI64(std::string OutputName) override;
+ virtual float getModelResultF(std::string OutputName) override;
+ virtual double getModelResultD(std::string OutputName) override;
+ virtual bool getModelResultB(std::string OutputName) override;
+
+ // Get status of the ML interface. Return zero if succeeded.
+ virtual int getStatus() override;
+
+ // Free up memory taken by a model.
+ virtual bool releaseModel(std::string ModelName) override;
+
+ // Close interface when done. Return false if the command was not successful.
+ // In some cases this just requires a constructor for this class to be called,
+ // but in others, additional steps that require feedback may be needed.
+ virtual bool closeMLInterface() override;
+
+private:
+ uint64_t NextID;
+
+ std::string CurrentlyActiveModel;
+
+ // Mapping model names to their corresponding Model
+ std::unordered_map<std::string, std::shared_ptr<Model>> ModelMap;
+
+ // Mapping model names to their corresponding Runner
+ std::unordered_map<std::string, std::shared_ptr<ACPOModelRunner>> RunnerMap;
+
+ std::string readModelParam(std::string FilePath, std::string Param);
+
+ void readFeatures(std::string FilePath,
+ std::vector<std::pair<std::string, std::string>> &Features);
+ void readOutputs(std::string FilePath,
+ std::vector<std::pair<std::string, std::string>> &Outputs);
+
+ typedef std::unique_ptr<ACPOModelRunner> (*CreateModelRunnerFunction)(
+ std::vector<std::pair<std::string, std::string>>,
+ StringRef); // function pointer type
+ const static std::unordered_map<std::string, CreateModelRunnerFunction>
+ CreateModelRunnerMap;
+};
+
+std::shared_ptr<ACPOMLInterface> createPersistentCompiledMLIF();
+
+} // namespace llvm
+
+#endif // LLVM_ANALYSIS_ACPOML_INTERFACE_H
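For illustration, a minimal, hypothetical driver of the interface declared above
might look as follows. The model name "inline_model", the spec file name, the
feature names, and the output name "should_inline" are all placeholder assumptions;
the calls themselves (createPersistentCompiledMLIF, loadModel, initializeFeatures,
runModel, getModelResultB) are the ones declared in this header.

    #include "llvm/Analysis/ACPOMLInterface.h"
    #include <memory>
    #include <string>
    #include <utility>
    #include <vector>
    using namespace llvm;

    static bool queryModel() {
      std::shared_ptr<ACPOMLInterface> MLIF = createPersistentCompiledMLIF();
      if (!MLIF || !MLIF->loadModel("inline_model.acpo"))
        return false;
      // Feature values are passed as (name, value-as-string) pairs.
      std::vector<std::pair<std::string, std::string>> Features = {
          {"basic_block_count", "12"}, {"uses", "3"}};
      if (!MLIF->initializeFeatures("inline_model", Features))
        return false;
      if (!MLIF->runModel("inline_model"))
        return false;
      return MLIF->getModelResultB("should_inline");
    }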
diff --git a/llvm/include/llvm/Analysis/ACPOModel.h b/llvm/include/llvm/Analysis/ACPOModel.h
new file mode 100644
index 000000000000..34dbc0fdb8bf
--- /dev/null
+++ b/llvm/include/llvm/Analysis/ACPOModel.h
@@ -0,0 +1,122 @@
+//===- ACPOModel.h - AI-Enabled Continuous Program Optimization -----------===//
+//
+// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+// See https://llvm.org/LICENSE.txt for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+//
+//===----------------------------------------------------------------------===//
+
+#ifndef LLVM_ANALYSIS_ACPOMODEL_H
+#define LLVM_ANALYSIS_ACPOMODEL_H
+
+#include "llvm/Analysis/ACPOMLInterface.h"
+#include "llvm/IR/Constants.h"
+#include "llvm/IR/Type.h"
+#include <map>
+#include <string>
+#include <tuple>
+#include <unordered_map>
+
+namespace llvm {
+
+class OptimizationRemarkEmitter;
+class LLVMContext;
+
+class ACPOAdvice {
+public:
+ struct FieldType {
+ Type::TypeID T;
+ Constant *Val;
+ };
+
+ ACPOAdvice() {}
+ ACPOAdvice(std::unique_ptr<ACPOAdvice> &ResultFormat);
+ virtual ~ACPOAdvice() {};
+
+ Constant *getField(std::string name) {
+ auto Search = FieldMap.find(name);
+ if (Search == FieldMap.end()) {
+ return nullptr;
+ }
+ return Search->second.Val;
+ }
+
+ void reserveField(std::string name, Type::TypeID &ID) {
+ FieldMap[name].T = ID;
+ FieldMap[name].Val = nullptr;
+ }
+
+ void addField(std::string name, Constant *Val) {
+ assert(Val != nullptr);
+ FieldMap[name].T = Val->getType()->getTypeID();
+ FieldMap[name].Val = Val;
+ }
+
+ std::unordered_map<std::string, struct FieldType> &getFieldMap() {
+ return FieldMap;
+ }
+
+private:
+ std::unordered_map<std::string, struct FieldType> FieldMap;
+};
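+
+// Illustrative use of ACPOAdvice (the field name "ShouldInline" and the
+// LLVMContext variable Ctx are assumed for the sake of the example):
+//
+//   ACPOAdvice Advice;
+//   Type::TypeID ID = Type::IntegerTyID;
+//   Advice.reserveField("ShouldInline", ID);      // declare the expected field
+//   Advice.addField("ShouldInline", ConstantInt::getTrue(Ctx));
+//   if (Constant *Val = Advice.getField("ShouldInline"))
+//     /* Val is non-null once the field has been filled in */;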
+
+class ACPOModel {
+public:
+ ACPOModel(OptimizationRemarkEmitter *OptReEm, bool UseML = true) :
+ ORE(OptReEm), ShouldUseML(UseML) {
+ ResultFormat = std::make_unique<ACPOAdvice>();
+ assert(ResultFormat != nullptr);
+ }
+
+ virtual ~ACPOModel() {}
+
+ bool isForcedToStop() const { return ForceStop; }
+
+ // This is an interface method to return the result of estimation either via an
+ // ML model or by employing a heuristic. The ML version should be implemented in
+ // getAdviceML, which can be overridden when necessary. The non-ML version
+ // should be implemented in getAdviceNoML and must always be overridden (it is
+ // marked as pure virtual (=0) to force the programmer to do so).
+ std::unique_ptr<ACPOAdvice> getAdvice();
+ void addRequiredResultField(std::string name, Type::TypeID &ID);
+
+ void setContextPtr(LLVMContext *C) { Context = C; }
+ LLVMContext *getContextPtr() { return Context; }
+
+ void setMLIF(std::shared_ptr<ACPOMLInterface> ML) { MLIF = ML; }
+ std::shared_ptr<ACPOMLInterface> getMLIF() { return MLIF; }
+
+protected:
+ void addFeature(int64_t ID, Constant *Val);
+ virtual void sendCustomFeatures() {}
+ virtual void prepareModelInput();
+ virtual bool runModel(std::unique_ptr<ACPOAdvice> &Result);
+
+ virtual std::unique_ptr<ACPOAdvice> getAdviceML();
+ virtual std::unique_ptr<ACPOAdvice> getAdviceNoML() = 0;
+
+private:
+ // Pointer to means of feedback propagation
+ OptimizationRemarkEmitter *ORE;
+
+ // We may need LLVMContext to set values of a Constant
+ LLVMContext *Context = nullptr;
+
+ // Specify expected format of the ACPOAdvice result.
+ std::unique_ptr<ACPOAdvice> ResultFormat = nullptr;
+
+ // Custom feature list.
+ std::unordered_map<uint64_t, Constant *> CustomFeatureMap;
+
+ // Interface to ML framework.
+ std::shared_ptr<ACPOMLInterface> MLIF = nullptr;
+
+ // Specify if ML infra is in use
+ bool ShouldUseML = false;
+ bool ForceStop = false;
+};
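+
+// A minimal sketch of a derived model (the class name is illustrative and not
+// part of this patch). Only the non-ML fallback is mandatory; the ML path can
+// reuse the default getAdviceML implementation.
+//
+//   class MyACPOModel : public ACPOModel {
+//   public:
+//     MyACPOModel(OptimizationRemarkEmitter *ORE) : ACPOModel(ORE, true) {}
+//
+//   protected:
+//     std::unique_ptr<ACPOAdvice> getAdviceNoML() override {
+//       return std::make_unique<ACPOAdvice>(); // heuristic decision goes here
+//     }
+//   };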
+
+} // namespace llvm
+
+#endif // LLVM_ANALYSIS_ACPOMODEL_H
diff --git a/llvm/include/llvm/Analysis/ACPOModelRunner.h b/llvm/include/llvm/Analysis/ACPOModelRunner.h
new file mode 100644
index 000000000000..819e17f71103
--- /dev/null
+++ b/llvm/include/llvm/Analysis/ACPOModelRunner.h
@@ -0,0 +1,39 @@
+//===- ACPOModelRunner.h - AI-Enabled Continuous Program Optimization -----===//
+//
+// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+// See https://llvm.org/LICENSE.txt for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+//
+//===----------------------------------------------------------------------===//
+
+#ifndef LLVM_ANALYSIS_ACPOMODELRUNNER_H
+#define LLVM_ANALYSIS_ACPOMODELRUNNER_H
+
+#include "llvm/Analysis/MLModelRunner.h"
+
+namespace llvm {
+
+class ACPOModelRunner : public MLModelRunner {
+public:
+ virtual bool setCustomFeature(int FeatureIndex, int FeatureValue) = 0;
+ virtual bool setCustomFeature(int FeatureIndex, int64_t FeatureValue) = 0;
+ virtual bool setCustomFeature(int FeatureIndex, double FeatureValue) = 0;
+ virtual bool setCustomFeature(int FeatureIndex, float FeatureValue) = 0;
+ virtual bool setCustomFeature(int FeatureIndex, bool FeatureValue) = 0;
+
+ virtual bool runModel() = 0;
+
+ virtual int getModelResultI(std::string OutputName) = 0;
+ virtual int64_t getModelResultI64(std::string OutputName) = 0;
+ virtual float getModelResultF(std::string OutputName) = 0;
+ virtual double getModelResultD(std::string OutputName) = 0;
+ virtual bool getModelResultB(std::string OutputName) = 0;
+
+protected:
+ ACPOModelRunner(LLVMContext &Ctx, size_t NrInputs)
+ : MLModelRunner(Ctx, MLModelRunner::Kind::Release, NrInputs) {}
+};
+
+} // namespace llvm
+
+#endif // LLVM_ANALYSIS_ACPOMODELRUNNER_H
diff --git a/llvm/include/llvm/Analysis/AOTModelRunner.h b/llvm/include/llvm/Analysis/AOTModelRunner.h
new file mode 100644
index 000000000000..abc6258c4f09
--- /dev/null
+++ b/llvm/include/llvm/Analysis/AOTModelRunner.h
@@ -0,0 +1,203 @@
+//===- AOTModelRunner.h - AI-Enabled Continuous Program Optimization ------===//
+//
+// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+// See https://llvm.org/LICENSE.txt for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+//
+//===----------------------------------------------------------------------===//
+
+#ifndef LLVM_ANALYSIS_AOTMODEL_H
+#define LLVM_ANALYSIS_AOTMODEL_H
+
+#include "llvm/Analysis/ACPOModelRunner.h"
+#include "llvm/Analysis/TensorSpec.h"
+
+#define DEBUG_TYPE "acpo-aot"
+
+namespace llvm {
+
+template <class TGen> class AOTModelRunner : public ACPOModelRunner {
+public:
+ /// Features should be an indexed collection of (name, type) string pairs,
+ /// such as std::vector, that has a size() method.
+ /// In the future, this method could be expanded to allow AOT models with
+ /// multiple outputs, by taking in a vector of string pairs similar to the
+ /// Features vector.
+ /// The current implementation does work for AOT models with a single output
+ /// which is a vector (or higher-dimensional tensor) of multiple values.
+ AOTModelRunner(
+ LLVMContext &Ctx,
+ const std::vector<std::pair<std::string, std::string>> &Features,
+ StringRef DecisionName, StringRef FeedPrefix = "feed_",
+ StringRef FetchPrefix = "fetch_")
+ : ACPOModelRunner(Ctx, Features.size()),
+ CompiledModel(std::make_unique<TGen>()) {
+ assert(CompiledModel && "The CompiledModel should be valid");
+
+ for (size_t I = 0; I < Features.size(); ++I) {
+ const int Index =
+ CompiledModel->LookupArgIndex(FeedPrefix.str() + Features[I].first);
+ void *Buffer = nullptr;
+ if (Index >= 0) {
+ Buffer = CompiledModel->arg_data(Index);
+ } else {
+ LLVM_DEBUG(dbgs() << "Warning: AOTModelRunner was unable to find the "
+ "feature "
+ << (FeedPrefix.str() + Features[I].first)
+ << " in the compiled model\n");
+ }
+ // The order of features passed to the model runner is important, it
+ // determines their index
+ TensorSpec Spec = makeSpec(Features[I].first, Features[I].second);
+ setUpBufferForTensor(I, Spec, Buffer);
+ }
+
+ ResultIndex = CompiledModel->LookupResultIndex(FetchPrefix.str() +
+ DecisionName.str());
+ assert(ResultIndex >= 0 && "Cannot find DecisionName in inlining model");
+ }
+
+ virtual ~AOTModelRunner() = default;
+
+ static bool classof(const ACPOModelRunner *R) {
+ return R->getKind() == ACPOModelRunner::Kind::Release;
+ }
+
+ bool setCustomFeature(int FeatureIndex, int FeatureValue) override {
+ LLVM_DEBUG(dbgs() << "AOTModelRunner: setting int feature " << FeatureIndex
+ << " to " << FeatureValue << "\n");
+ *getTensor<int>(FeatureIndex) = FeatureValue;
+ return true;
+ }
+ bool setCustomFeature(int FeatureIndex, int64_t FeatureValue) override {
+ LLVM_DEBUG(dbgs() << "AOTModelRunner: setting int64 feature "
+ << FeatureIndex << " to " << FeatureValue << "\n");
+ *getTensor<int64_t>(FeatureIndex) = FeatureValue;
+ return true;
+ }
+ bool setCustomFeature(int FeatureIndex, double FeatureValue) override {
+ LLVM_DEBUG(dbgs() << "AOTModelRunner: setting double feature "
+ << FeatureIndex << " to " << FeatureValue << "\n");
+ *getTensor<double>(FeatureIndex) = FeatureValue;
+ return true;
+ }
+ bool setCustomFeature(int FeatureIndex, float FeatureValue) override {
+ LLVM_DEBUG(dbgs() << "AOTModelRunner: setting float feature "
+ << FeatureIndex << " to " << FeatureValue << "\n");
+ *getTensor<float>(FeatureIndex) = FeatureValue;
+ return true;
+ }
+ bool setCustomFeature(int FeatureIndex, bool FeatureValue) override {
+ // There are no bool tensors, so assume int for now
+ LLVM_DEBUG(dbgs() << "AOTModelRunner: setting bool feature " << FeatureIndex
+ << " to " << FeatureValue << "\n");
+ *getTensor<int>(FeatureIndex) = FeatureValue;
+ return true;
+ }
+
+ bool runModel() override {
+ CompiledModel->Run();
+ return true;
+ }
+
+ int getModelResultI(std::string OutputName) override {
+ void *Data = CompiledModel->result_data(ResultIndex);
+ int Result = *reinterpret_cast<int *>(Data);
+ LLVM_DEBUG(dbgs() << "Returning int model result " << OutputName << " = "
+ << Result << "\n");
+ return Result;
+ }
+
+ int64_t getModelResultI64(std::string OutputName) override {
+ void *Data = CompiledModel->result_data(ResultIndex);
+ int64_t Result = *reinterpret_cast<int64_t *>(Data);
+ LLVM_DEBUG(dbgs() << "Returning int64 model result " << OutputName << " = "
+ << Result << "\n");
+ return Result;
+ }
+
+ float getModelResultF(std::string OutputName) override {
+ void *Data = CompiledModel->result_data(ResultIndex);
+ float Result = *reinterpret_cast<float *>(Data);
+ LLVM_DEBUG(dbgs() << "Returning float model result " << OutputName << " = "
+ << Result << "\n");
+ return Result;
+ }
+
+ double getModelResultD(std::string OutputName) override {
+ void *Data = CompiledModel->result_data(ResultIndex);
+ double Result = *reinterpret_cast<double *>(Data);
+ LLVM_DEBUG(dbgs() << "Returning double model result " << OutputName << " = "
+ << Result << "\n");
+ return Result;
+ }
+
+ bool getModelResultB(std::string OutputName) override {
+ // Since there are no bool tensors, use int and return the corresponding
+ // result
+ void *Data = CompiledModel->result_data(ResultIndex);
+ bool Result = (*reinterpret_cast<int *>(Data)) > 0;
+ LLVM_DEBUG(dbgs() << "Returning bool model result " << OutputName << " = "
+ << Result << "\n");
+ return Result;
+ }
+
+protected:
+ std::unique_ptr<TGen> CompiledModel;
+
+private:
+ void *evaluateUntyped() override {
+ CompiledModel->Run();
+ return CompiledModel->result_data(ResultIndex);
+ }
+
+ llvm::TensorSpec makeSpec(std::string Name, std::string Type) {
+ std::vector<int64_t> Shape{};
+ // If the string is of the form "float32[7][8]", read the value in brackets
+ // as the shape (read from left to right)
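+ // For example, "float32[7][8]" yields element type "float32" with
+ // Shape = {7, 8}; a plain "int64" has no brackets and falls back to the
+ // default Shape = {1} below.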
+ size_t RightBracket = 0;
+ size_t LeftBracket = 0;
+ do {
+ LeftBracket = Type.find("[", RightBracket + 1);
+ if (LeftBracket == std::string::npos) {
+ break;
+ }
+ RightBracket = Type.find("]", LeftBracket + 1);
+ size_t Value = std::stol(
+ Type.substr(LeftBracket + 1, RightBracket - LeftBracket - 1));
+ Shape.push_back(Value);
+ } while (RightBracket != std::string::npos);
+
+ // Remove array indices to just get type
+ if (Type.find("[") != std::string::npos) {
+ Type = Type.substr(0, Type.find("["));
+ }
+
+ if (Shape.size() == 0)
+ Shape.push_back(1); // Default shape is {1}
+
+ if (Type == "int64") {
+ return TensorSpec::createSpec<int64_t>(Name, Shape);
+ }
+ if (Type == "int32") {
+ return TensorSpec::createSpec<int32_t>(Name, Shape);
+ }
+ if (Type == "int" || Type == "bool") {
+ // There are no bool tensors, so assume int for now
+ return TensorSpec::createSpec<int>(Name, Shape);
+ }
+ if (Type == "float64") {
+ return TensorSpec::createSpec<double>(Name, Shape);
+ }
+ if (Type == "float32") {
+ return TensorSpec::createSpec<float>(Name, Shape);
+ }
+ llvm_unreachable("ACPO AOT: received unknown feature type");
+ }
+
+ int32_t ResultIndex = -1;
+};
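+
+// Sketch of intended use (MyCompiledModel stands for an AOT-generated model
+// class and, like the feature and output names, is only an assumption for
+// this example):
+//
+//   std::vector<std::pair<std::string, std::string>> Features = {
+//       {"feature_a", "int64"}, {"feature_b", "float32[4]"}};
+//   AOTModelRunner<MyCompiledModel> Runner(Ctx, Features, "output");
+//   Runner.setCustomFeature(0, static_cast<int64_t>(42));
+//   Runner.runModel();
+//   int64_t Result = Runner.getModelResultI64("output");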
+
+} // namespace llvm
+
+#endif // LLVM_ANALYSIS_AOTMODEL_H
diff --git a/llvm/include/llvm/Analysis/CallHeight.h b/llvm/include/llvm/Analysis/CallHeight.h
new file mode 100644
index 000000000000..c1251081f525
--- /dev/null
+++ b/llvm/include/llvm/Analysis/CallHeight.h
@@ -0,0 +1,72 @@
+//===- CallHeight.h - Call height for function ------------------*- C++ -*-===//
+//
+// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+// See https://llvm.org/LICENSE.txt for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+//
+//===----------------------------------------------------------------------===//
+//
+// This header file defines passes to get the call height of functions.
+//
+//===----------------------------------------------------------------------===//
+
+#ifndef LLVM_ANALYSIS_CALLHEIGHT
+#define LLVM_ANALYSIS_CALLHEIGHT
+
+#include "llvm/IR/Module.h"
+#include "llvm/IR/PassManager.h"
+#include "llvm/Pass.h"
+
+#include <unordered_map>
+#include <map>
+
+namespace llvm {
+
+class CallHeight {
+private:
+ /// Map from function to its level (callheight)
+ std::unique_ptr<std::map<const Function *, unsigned>> Levels;
+
+public:
+ CallHeight(Module &M);
+
+ // Change this to getHeight
+ unsigned getLevel(Function &F);
+
+ bool invalidate(Module &, const PreservedAnalyses &PA,
+ ModuleAnalysisManager::Invalidator &) {
+ return false;
+ }
+};
+
+/// This analysis computes the mapping from function to level (callheight)
+/// for MLInliner
+class CallHeightAnalysis : public AnalysisInfoMixin<CallHeightAnalysis> {
+public:
+ static AnalysisKey Key;
+ using Result = CallHeight;
+
+ Result run(Module &M, ModuleAnalysisManager &MAM);
+};
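+
+// Sketch: with a ModuleAnalysisManager MAM in scope, the result can be queried
+// as follows (M and F are placeholder Module and Function references):
+//
+//   CallHeight &CH = MAM.getResult<CallHeightAnalysis>(M);
+//   unsigned Level = CH.getLevel(F);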
+
+/// Legacy wrapper pass to provide the CallHeightAnalysis object.
+class CallHeightAnalysisWrapper : public ModulePass {
+ std::unique_ptr<llvm::CallHeight> Result;
+
+public:
+ static char ID;
+
+ CallHeightAnalysisWrapper() : ModulePass(ID) {}
+
+ bool runOnModule(Module &M) override;
+
+ llvm::CallHeight &getResult() { return *Result; }
+ const llvm::CallHeight &getResult() const { return *Result; }
+ void getAnalysisUsage(AnalysisUsage &AU) const override;
+};
+
+Pass *createCallHeightAnalysisWrapper();
+
+} // namespace llvm
+
+#endif
diff --git a/llvm/include/llvm/Analysis/DumpCallsite.h b/llvm/include/llvm/Analysis/DumpCallsite.h
new file mode 100644
index 000000000000..9f80fe1cb985
--- /dev/null
+++ b/llvm/include/llvm/Analysis/DumpCallsite.h
@@ -0,0 +1,27 @@
+//===- DumpCallSite.h - Dump information about a callsite -------*- C++ -*-===//
+//
+// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+// See https://llvm.org/LICENSE.txt for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+//
+//===----------------------------------------------------------------------===//
+//
+// This header file defines the pass used to dump a callsite.
+//
+//===----------------------------------------------------------------------===//
+
+#ifndef LLVM_ANALYSIS_DUMPCALLSITE
+#define LLVM_ANALYSIS_DUMPCALLSITE
+
+#include "llvm/IR/PassManager.h"
+
+namespace llvm {
+
+class DumpCallsitePass : public PassInfoMixin<DumpCallsitePass> {
+public:
+ PreservedAnalyses run(Function &F, FunctionAnalysisManager &FAM);
+};
+
+} // namespace llvm
+
+#endif
diff --git a/llvm/include/llvm/Analysis/DumpFeature.h b/llvm/include/llvm/Analysis/DumpFeature.h
new file mode 100644
index 000000000000..226e06cf5600
--- /dev/null
+++ b/llvm/include/llvm/Analysis/DumpFeature.h
@@ -0,0 +1,194 @@
+//===- DumpFeature.h - Dump features for a function -------------*- C++ -*-===//
+//
+// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+// See https://llvm.org/LICENSE.txt for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+//
+//===----------------------------------------------------------------------===//
+//
+// This header file defines passes to dump features for functions in an SCC.
+//
+//===----------------------------------------------------------------------===//
+
+#ifndef LLVM_ANALYSIS_DUMPFEATURE
+#define LLVM_ANALYSIS_DUMPFEATURE
+
+#include "llvm/Analysis/BlockFrequencyInfo.h"
+#include "llvm/Analysis/CGSCCPassManager.h"
+#include "llvm/Analysis/CallGraph.h"
+#include "llvm/Analysis/CallGraphSCCPass.h"
+#include "llvm/Analysis/LoopInfo.h"
+#include "llvm/Analysis/TargetTransformInfo.h"
+#include "llvm/IR/Dominators.h"
+#include "llvm/IR/PassManager.h"
+#include "llvm/Pass.h"
+
+#include <map>
+
+// EnableFeatureDump - This boolean is set to true if '-enable-feature-dump' is
+// used as a command line option, in which case function features are dumped.
+extern bool EnableFeatureDump;
+
+namespace llvm {
+
+class DumpFeaturePass : public PassInfoMixin<DumpFeaturePass> {
+public:
+ PreservedAnalyses run(LazyCallGraph::SCC &C, CGSCCAnalysisManager &AM,
+ LazyCallGraph &CG, CGSCCUpdateResult &UR);
+
+private:
+ /// Get the caller height from cache or calculate from scratch
+ /// for a specific function F
+ int getCallHeight(LazyCallGraph::SCC &C, CGSCCAnalysisManager &AM,
+ LazyCallGraph &CG, Function *F);
+};
+
+class ACPOFIExtendedFeatures {
+public:
+ enum class NamedFeatureIndex : size_t {
+ InitialSize,
+ Blocks,
+ Calls,
+ IsLocal,
+ IsLinkOnceODR,
+ IsLinkOnce,
+ Loops,
+ MaxLoopDepth,
+ MaxDomTreeLevel,
+ PtrArgs,
+ PtrCallee,
+ CallReturnPtr,
+ ConditionalBranch,
+ CBwithArg,
+ CallerHeight,
+ CallUsage,
+ IsRecursive,
+ NumCallsiteInLoop,
+ NumOfCallUsesInLoop,
+ EntryBlockFreq,
+ MaxCallsiteBlockFreq,
+ NumNamedFeatures
+ };
+
+ enum class NamedFloatFeatureIndex : size_t {
+ InstructionPerBlock,
+ SuccessorPerBlock,
+ AvgVecInstr,
+ AvgNestedLoopLevel,
+ InstrPerLoop,
+ BlockWithMultipleSuccecorsPerLoop,
+ NumNamedFloatFeatures
+ };
+
+ struct FunctionFeatures {
+ static const size_t FeatureCount;
+
+ std::array<uint64_t,
+ static_cast<size_t>(NamedFeatureIndex::NumNamedFeatures)>
+ NamedFeatures = {{0}};
+ std::array<float, static_cast<size_t>(
+ NamedFloatFeatureIndex::NumNamedFloatFeatures)>
+ NamedFloatFeatures = {{0}};
+ std::vector<int32_t> InstructionHistogram;
+ std::vector<int32_t> InstructionPairHistogram;
+
+ void fillTensor(int32_t *Ptr) const;
+ uint64_t &operator[](NamedFeatureIndex Pos) {
+ return NamedFeatures[static_cast<size_t>(Pos)];
+ }
+ float &operator[](NamedFloatFeatureIndex Pos) {
+ return NamedFloatFeatures[static_cast<size_t>(Pos)];
+ }
+ };
+
+ ACPOFIExtendedFeatures() = default;
+
+ // Collect a number of features from the function F
+ static FunctionFeatures getFunctionFeatures(
+ Function &F, DominatorTree &DomTree, TargetTransformInfo &TTI,
+ LoopInfo &LI, FunctionAnalysisManager *FAM = nullptr, bool ValidSize = false,
+ bool ValidLoop = false, bool ValidTree = false);
+
+private:
+ // Loop related features, will update FF
+ static void updateLoopRelatedFeatures(Function &F, LoopInfo &LI,
+ FunctionFeatures &FF);
+ // Instruction and BasicBlock related features, will update FF
+ static void updateInstBBRelatedFeatures(Function &F, FunctionFeatures &FF);
+
+ // This function should mimic the behaviour of updating all features below at
+ // once:
+ // getMaxCallsiteBlockFreq
+ // updateCallsiteRelatedFeatures
+ // updateInstBBRelatedFeatures
+ static void
+ updateBBLoopCallsiteBFFeatures(Function &F, FunctionFeatures &FF,
+ LoopInfo &LI,
+ FunctionAnalysisManager *FAM = nullptr);
+};
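+
+// Sketch (DT, TTI, LI and FAM are assumed to come from the usual analyses):
+//
+//   auto FF = ACPOFIExtendedFeatures::getFunctionFeatures(F, DT, TTI, LI, &FAM);
+//   uint64_t Blocks = FF[ACPOFIExtendedFeatures::NamedFeatureIndex::Blocks];
+//   float InstrsPerBB =
+//       FF[ACPOFIExtendedFeatures::NamedFloatFeatureIndex::InstructionPerBlock];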
+
+const std::map<ACPOFIExtendedFeatures::NamedFeatureIndex, std::string>
+ NamedFeatureIndexToName = {
+ {ACPOFIExtendedFeatures::NamedFeatureIndex::InitialSize, "InitialSize"},
+ {ACPOFIExtendedFeatures::NamedFeatureIndex::Blocks, "Blocks"},
+ {ACPOFIExtendedFeatures::NamedFeatureIndex::Calls, "Calls"},
+ {ACPOFIExtendedFeatures::NamedFeatureIndex::IsLocal, "IsLocal"},
+ {ACPOFIExtendedFeatures::NamedFeatureIndex::IsLinkOnceODR,
+ "IsLinkOnceODR"},
+ {ACPOFIExtendedFeatures::NamedFeatureIndex::IsLinkOnce, "IsLinkOnce"},
+ {ACPOFIExtendedFeatures::NamedFeatureIndex::Loops, "Loops"},
+ {ACPOFIExtendedFeatures::NamedFeatureIndex::MaxLoopDepth,
+ "MaxLoopDepth"},
+ {ACPOFIExtendedFeatures::NamedFeatureIndex::MaxDomTreeLevel,
+ "MaxDomTreeLevel"},
+ {ACPOFIExtendedFeatures::NamedFeatureIndex::PtrArgs, "PtrArgs"},
+ {ACPOFIExtendedFeatures::NamedFeatureIndex::PtrCallee, "PtrCallee"},
+ {ACPOFIExtendedFeatures::NamedFeatureIndex::CallReturnPtr,
+ "CallReturnPtr"},
+ {ACPOFIExtendedFeatures::NamedFeatureIndex::ConditionalBranch,
+ "ConditionalBranch"},
+ {ACPOFIExtendedFeatures::NamedFeatureIndex::CBwithArg, "CBwithArg"},
+ {ACPOFIExtendedFeatures::NamedFeatureIndex::CallerHeight,
+ "CallerHeight"},
+ {ACPOFIExtendedFeatures::NamedFeatureIndex::CallUsage, "CallUsage"},
+ {ACPOFIExtendedFeatures::NamedFeatureIndex::IsRecursive, "IsRecursive"},
+ {ACPOFIExtendedFeatures::NamedFeatureIndex::NumCallsiteInLoop,
+ "NumCallsiteInLoop"},
+ {ACPOFIExtendedFeatures::NamedFeatureIndex::NumOfCallUsesInLoop,
+ "NumOfCallUsesInLoop"},
+ {ACPOFIExtendedFeatures::NamedFeatureIndex::EntryBlockFreq,
+ "EntryBlockFreq"},
+ {ACPOFIExtendedFeatures::NamedFeatureIndex::MaxCallsiteBlockFreq,
+ "MaxCallsiteBlockFreq"}};
+
+const std::map<ACPOFIExtendedFeatures::NamedFloatFeatureIndex, std::string>
+ FloatFeatureIndexToName = {
+ {ACPOFIExtendedFeatures::NamedFloatFeatureIndex::InstructionPerBlock,
+ "InstructionPerBlock"},
+ {ACPOFIExtendedFeatures::NamedFloatFeatureIndex::SuccessorPerBlock,
+ "SuccessorPerBlock"},
+ {ACPOFIExtendedFeatures::NamedFloatFeatureIndex::AvgVecInstr,
+ "AvgVecInstr"},
+ {ACPOFIExtendedFeatures::NamedFloatFeatureIndex::AvgNestedLoopLevel,
+ "AvgNestedLoopLevel"},
+ {ACPOFIExtendedFeatures::NamedFloatFeatureIndex::InstrPerLoop,
+ "InstrPerLoop"},
+ {ACPOFIExtendedFeatures::NamedFloatFeatureIndex::
+ BlockWithMultipleSuccecorsPerLoop,
+ "BlockWithMultipleSuccecorsPerLoop"}};
+
+ACPOFIExtendedFeatures::NamedFeatureIndex &
+operator++(ACPOFIExtendedFeatures::NamedFeatureIndex &n);
+
+ACPOFIExtendedFeatures::NamedFeatureIndex
+operator++(ACPOFIExtendedFeatures::NamedFeatureIndex &n, int);
+
+ACPOFIExtendedFeatures::NamedFloatFeatureIndex &
+operator++(ACPOFIExtendedFeatures::NamedFloatFeatureIndex &n);
+
+ACPOFIExtendedFeatures::NamedFloatFeatureIndex
+operator++(ACPOFIExtendedFeatures::NamedFloatFeatureIndex &n, int);
+
+} // namespace llvm
+
+#endif
diff --git a/llvm/include/llvm/Analysis/LoopInfo.h b/llvm/include/llvm/Analysis/LoopInfo.h
index 9be3e056cf76..ea4cb7f7c684 100644
--- a/llvm/include/llvm/Analysis/LoopInfo.h
+++ b/llvm/include/llvm/Analysis/LoopInfo.h
@@ -386,6 +386,17 @@ public:
void dump() const;
void dumpVerbose() const;
+#if defined(ENABLE_ACPO)
+ /// Print loop IR wrapped in a dummy function
+ void printWithFunctionWrapper(raw_ostream &ROS, Function *F,
+ ArrayRef<BasicBlock *> LoopBlocks,
+ BasicBlock *Header,
+ SmallVector<BasicBlock *, 8> ExitBlocks,
+ AssemblyAnnotationWriter *AAW,
+ bool ShouldPreserveUseListOrder,
+ bool IsForDebug) const;
+#endif
+
/// Return the debug location of the start of this loop.
/// This looks for a BB terminating instruction with a known debug
/// location by looking at the preheader and header blocks. If it
diff --git a/llvm/include/llvm/Analysis/ModelDataCollector.h b/llvm/include/llvm/Analysis/ModelDataCollector.h
new file mode 100644
index 000000000000..ad3fc476a9b2
--- /dev/null
+++ b/llvm/include/llvm/Analysis/ModelDataCollector.h
@@ -0,0 +1,108 @@
+//===- ModelDataCollector.h - Data collector for ML model -----------------===//
+//
+// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+// See https://llvm.org/LICENSE.txt for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+//
+//===----------------------------------------------------------------------===//
+
+#ifndef LLVM_ANALYSIS_MODELDATACOLLECTOR_H
+#define LLVM_ANALYSIS_MODELDATACOLLECTOR_H
+
+#if defined(ENABLE_ACPO)
+#include "llvm/ADT/StringMap.h"
+#include "llvm/Analysis/ACPOCollectFeatures.h"
+#include "llvm/Analysis/LoopInfo.h"
+#include "llvm/Support/FormattedStream.h"
+#include "llvm/Support/raw_ostream.h"
+#include <string>
+#include <vector>
+
+namespace llvm {
+class ModelDataCollector {
+public:
+ enum DumpOption { function, loop, before, after };
+
+ ModelDataCollector(formatted_raw_ostream &OS, std::string OutputFileName = "")
+ : OutputFileName(OutputFileName), Out(OS) {}
+
+ ~ModelDataCollector() {}
+
+ std::string getDumpOptionAsString(DumpOption DO);
+ std::string getIRFileName(StringRef Key);
+ std::string getOutputFileName();
+ bool isEmptyOutputFile();
+ //std::string generateIRFileName(autotuning::CodeRegion CR);
+ std::string demangleName(const std::string &Name);
+ std::vector<std::pair<std::string, std::string>> getFeatures();
+ std::unique_ptr<raw_ostream>
+ createFile(const Twine &FilePath, const Twine &FileName, std::error_code &EC);
+ StringMap<std::string> getIRFileNameMap();
+ void
+ setFeatures(std::vector<std::pair<std::string, std::string>> NewFeatures);
+ void setIRFileNameMap(StringMap<std::string> IRFileNameMap);
+ void
+ addFeatures(std::vector<std::pair<std::string, std::string>> NewFeatures);
+
+ // Print out the features
+ void printRow(bool printHeader = false);
+
+ // Create the directory structure and store IR files in their corresponding
+ // directory
+ void writeIR(Loop *L, Function *F, std::string NewIRFileName,
+ std::string PassName, DumpOption DumpBeforeOrAfter,
+ bool PrintLoop, bool PrintFunction,
+ bool OverwriteIRFile = false);
+
+ // Print the loop IR to a file
+ void createIRFileForLoop(Loop *L, const Twine &IRFilePath,
+ const Twine &NewIRFileName, bool OverwriteIRFile);
+
+ // Print the function IR to a file
+ void createIRFileForFunction(Function *F, const Twine &IRFilePath,
+ const Twine &NewIRFileName,
+ bool OverwriteIRFile);
+
+ virtual void collectFeatures(Loop *L, const std::string &ModuleName,
+ const std::string &FuncName,
+ const std::string &LoopName);
+
+ virtual void collectFeatures();
+
+ // FeatureCollectInfo contains the information of a registered feature.
+ struct FeatureCollectInfo {
+ std::unique_ptr<ACPOCollectFeatures::FeaturesInfo> FeaturesInfo;
+ std::unique_ptr<ACPOCollectFeatures::Scopes> RegisteredScopes;
+ std::unique_ptr<ACPOCollectFeatures::GroupIDs> RegisteredGroupIDs;
+ std::unique_ptr<ACPOCollectFeatures::FeatureInfo> GlobalInfo;
+ std::unique_ptr<ACPOCollectFeatures> FeatureCollector;
+ std::string Prefix;
+ std::string Postfix;
+ };
+
+ void registerFeature(ACPOCollectFeatures::FeaturesInfo, std::string = "",
+ std::string = "");
+ void registerFeature(ACPOCollectFeatures::Scopes,
+ ACPOCollectFeatures::FeatureInfo, std::string = "",
+ std::string = "");
+ void registerFeature(ACPOCollectFeatures::GroupIDs,
+ ACPOCollectFeatures::FeatureInfo, std::string = "",
+ std::string = "");
+ void resetRegisteredFeatures();
+
+protected:
+ // Collected features
+ std::vector<std::pair<std::string, std::string>> Features;
+ // NOTE: OutputFileName being empty (null) is treated as stdout
+ std::string OutputFileName;
+ std::vector<std::unique_ptr<FeatureCollectInfo>> FeatureCollectInfos;
+
+private:
+ // Stream for dumping training data
+ formatted_raw_ostream &Out;
+ StringMap<std::string> IRFileNames;
+};
+} // namespace llvm
+
+#endif // ENABLE_ACPO
+#endif // LLVM_ANALYSIS_MODELDATACOLLECTOR_H
diff --git a/llvm/include/llvm/InitializePasses.h b/llvm/include/llvm/InitializePasses.h
index 80bec2d82e24..7fdb5db67c16 100644
--- a/llvm/include/llvm/InitializePasses.h
+++ b/llvm/include/llvm/InitializePasses.h
@@ -100,6 +100,7 @@ void initializeDomPrinterWrapperPassPass(PassRegistry &);
void initializeDomViewerWrapperPassPass(PassRegistry &);
void initializeDominanceFrontierWrapperPassPass(PassRegistry&);
void initializeDominatorTreeWrapperPassPass(PassRegistry&);
+void initializeDumpCallsiteLegacyPass(PassRegistry &);
void initializeDwarfEHPrepareLegacyPassPass(PassRegistry &);
void initializeEarlyCSELegacyPassPass(PassRegistry&);
void initializeEarlyCSEMemSSALegacyPassPass(PassRegistry&);
@@ -124,6 +125,7 @@ void initializeFixIrreduciblePass(PassRegistry &);
void initializeFixupStatepointCallerSavedPass(PassRegistry&);
void initializeFlattenCFGLegacyPassPass(PassRegistry &);
void initializeFuncletLayoutPass(PassRegistry&);
+void initializeCallHeightAnalysisWrapperPass(PassRegistry &);
void initializeGCMachineCodeAnalysisPass(PassRegistry&);
void initializeGCModuleInfoPass(PassRegistry&);
void initializeGVNLegacyPassPass(PassRegistry&);
@@ -132,6 +134,7 @@ void initializeGlobalsAAWrapperPassPass(PassRegistry&);
void initializeGuardWideningLegacyPassPass(PassRegistry&);
void initializeHardwareLoopsLegacyPass(PassRegistry&);
void initializeMIRProfileLoaderPassPass(PassRegistry &);
+void initializeInlineAdvisorAnalysisWrapperPass(PassRegistry &);
void initializeIRSimilarityIdentifierWrapperPassPass(PassRegistry&);
void initializeIRTranslatorPass(PassRegistry&);
void initializeIVUsersWrapperPassPass(PassRegistry&);
@@ -149,6 +152,11 @@ void initializeInterleavedLoadCombinePass(PassRegistry &);
void initializeIntervalPartitionPass(PassRegistry&);
void initializeJMCInstrumenterPass(PassRegistry&);
void initializeKCFIPass(PassRegistry &);
+void initializeLegacyFAMPass(PassRegistry &);
+void initializeLegacyFunctionPropertiesAnalysisPass(PassRegistry &);
+void initializeLegacyInlinerPassPass(PassRegistry &);
+void initializeLegacyInlineSizeEstimatorAnalysisPass(PassRegistry &);
+void initializeLegacyModuleInlinerWrapperPassPass(PassRegistry &);
void initializeLCSSAVerificationPassPass(PassRegistry&);
void initializeLCSSAWrapperPassPass(PassRegistry&);
void initializeLazyBlockFrequencyInfoPassPass(PassRegistry&);
diff --git a/llvm/lib/Analysis/ACPOCollectFeatures.cpp b/llvm/lib/Analysis/ACPOCollectFeatures.cpp
new file mode 100644
index 000000000000..f9de26483c76
--- /dev/null
+++ b/llvm/lib/Analysis/ACPOCollectFeatures.cpp
@@ -0,0 +1,1258 @@
+//===- ACPOCollectFeatures.cpp - ACPO Class for Feature Collection -------===//
+//
+// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+// See https://llvm.org/LICENSE.txt for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+//
+//===----------------------------------------------------------------------===//
+//
+// This file implements ACPOCollectFeatures class
+//
+//===----------------------------------------------------------------------===//
+
+#include "llvm/Analysis/ACPOCollectFeatures.h"
+#include "llvm/ADT/SCCIterator.h"
+// The ACPOFIModel.h currently contains only the cache system for
+// ACPOFIExtendedFeatures.
+#include "llvm/Analysis/ACPOFIModel.h"
+#include "llvm/Analysis/AssumptionCache.h"
+#include "llvm/Analysis/BlockFrequencyInfo.h"
+#include "llvm/Analysis/CallGraph.h"
+#include "llvm/Analysis/DumpFeature.h"
+#include "llvm/Analysis/FunctionPropertiesAnalysis.h"
+#include "llvm/Analysis/InlineAdvisor.h"
+#include "llvm/Analysis/InlineCost.h"
+#include "llvm/Analysis/OptimizationRemarkEmitter.h"
+#include "llvm/Analysis/TargetTransformInfo.h"
+#include "llvm/IR/Dominators.h"
+#include "llvm/IR/InstIterator.h"
+#include "llvm/IR/Instructions.h"
+#include "llvm/Support/Debug.h"
+
+#define DEBUG_TYPE "ACPOCollectFeatures"
+
+namespace llvm {
+
+// Helper functions used to calculate features. Each function should be
+// registered in the CalculateFeatureMap.
+static void calculateFPIRelated(ACPOCollectFeatures &ACF,
+ const ACPOCollectFeatures::FeatureInfo &info);
+static void
+calculateCallerBlockFreq(ACPOCollectFeatures &ACF,
+ const ACPOCollectFeatures::FeatureInfo &info);
+static void
+calculateCallSiteHeight(ACPOCollectFeatures &ACF,
+ const ACPOCollectFeatures::FeatureInfo &info);
+static void
+calculateConstantParam(ACPOCollectFeatures &ACF,
+ const ACPOCollectFeatures::FeatureInfo &info);
+static void calculateCostEstimate(ACPOCollectFeatures &ACF,
+ const ACPOCollectFeatures::FeatureInfo &info);
+static void
+calculateEdgeNodeCount(ACPOCollectFeatures &ACF,
+ const ACPOCollectFeatures::FeatureInfo &info);
+static void
+calculateHotColdCallSite(ACPOCollectFeatures &ACF,
+ const ACPOCollectFeatures::FeatureInfo &info);
+static void calculateLoopLevel(ACPOCollectFeatures &ACF,
+ const ACPOCollectFeatures::FeatureInfo &info);
+static void
+calculateMandatoryKind(ACPOCollectFeatures &ACF,
+ const ACPOCollectFeatures::FeatureInfo &info);
+static void
+calculateMandatoryOnly(ACPOCollectFeatures &ACF,
+ const ACPOCollectFeatures::FeatureInfo &info);
+static void
+calculateInlineCostFeatures(ACPOCollectFeatures &ACF,
+ const ACPOCollectFeatures::FeatureInfo &info);
+static void calculateACPOFIExtendedFeaturesFeatures(
+ ACPOCollectFeatures &ACF, const ACPOCollectFeatures::FeatureInfo &info);
+static void
+calculateIsIndirectCall(ACPOCollectFeatures &ACF,
+ const ACPOCollectFeatures::FeatureInfo &info);
+static void
+calculateIsInInnerLoop(ACPOCollectFeatures &ACF,
+ const ACPOCollectFeatures::FeatureInfo &info);
+static void
+calculateIsMustTailCall(ACPOCollectFeatures &ACF,
+ const ACPOCollectFeatures::FeatureInfo &info);
+static void calculateIsTailCall(ACPOCollectFeatures &ACF,
+ const ACPOCollectFeatures::FeatureInfo &info);
+static void calculateOptCode(ACPOCollectFeatures &ACF,
+ const ACPOCollectFeatures::FeatureInfo &info);
+
+// Register FeatureIdx -> Feature name
+// FeatureIdx -> Scope, Scope -> FeatureIdx
+// FeatureIdx -> Group, Group -> FeatureIdx
+// FeatureIdx -> Calculating function
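+// For example, a hypothetical new feature Foo would add a FeatureIndex entry
+// plus one REGISTER_NAME, REGISTER_SCOPE and (if it belongs to a group)
+// REGISTER_GROUP entry, and would register its calculating function via
+// REGISTER_FUNCTION(Foo, calculateFoo) in CalculateFeatureMap below.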
+#define REGISTER_NAME(INDEX_NAME, NAME) \
+ { ACPOCollectFeatures::FeatureIndex::INDEX_NAME, NAME }
+const std::unordered_map<ACPOCollectFeatures::FeatureIndex, std::string>
+ ACPOCollectFeatures::FeatureIndexToName{
+ REGISTER_NAME(SROASavings, "sroa_savings"),
+ REGISTER_NAME(SROALosses, "sroa_losses"),
+ REGISTER_NAME(LoadElimination, "load_elimination"),
+ REGISTER_NAME(CallPenalty, "call_penalty"),
+ REGISTER_NAME(CallArgumentSetup, "call_argument_setup"),
+ REGISTER_NAME(LoadRelativeIntrinsic, "load_relative_intrinsic"),
+ REGISTER_NAME(LoweredCallArgSetup, "lowered_call_arg_setup"),
+ REGISTER_NAME(IndirectCallPenalty, "indirect_call_penalty"),
+ REGISTER_NAME(JumpTablePenalty, "jump_table_penalty"),
+ REGISTER_NAME(CaseClusterPenalty, "case_cluster_penalty"),
+ REGISTER_NAME(SwitchPenalty, "switch_penalty"),
+ REGISTER_NAME(UnsimplifiedCommonInstructions,
+ "unsimplified_common_instructions"),
+ REGISTER_NAME(NumLoops, "num_loops"),
+ REGISTER_NAME(DeadBlocks, "dead_blocks"),
+ REGISTER_NAME(SimplifiedInstructions, "simplified_instructions"),
+ REGISTER_NAME(ConstantArgs, "constant_args"),
+ REGISTER_NAME(ConstantOffsetPtrArgs, "constant_offset_ptr_args"),
+ REGISTER_NAME(CallSiteCost, "callsite_cost"),
+ REGISTER_NAME(ColdCcPenalty, "cold_cc_penalty"),
+ REGISTER_NAME(LastCallToStaticBonus, "last_call_to_static_bonus"),
+ REGISTER_NAME(IsMultipleBlocks, "is_multiple_blocks"),
+ REGISTER_NAME(NestedInlines, "nested_inlines"),
+ REGISTER_NAME(NestedInlineCostEstimate, "nested_inline_cost_estimate"),
+ REGISTER_NAME(Threshold, "threshold"),
+ REGISTER_NAME(BasicBlockCount, "basic_block_count"),
+ REGISTER_NAME(BlocksReachedFromConditionalInstruction,
+ "conditionally_executed_blocks"),
+ REGISTER_NAME(Uses, "users"),
+ REGISTER_NAME(EdgeCount, "edge_count"),
+ REGISTER_NAME(NodeCount, "node_count"),
+ REGISTER_NAME(ColdCallSite, "cold_callsite"),
+ REGISTER_NAME(HotCallSite, "hot_callsite"),
+ REGISTER_NAME(ACPOFIExtendedFeaturesInitialSize, "InitialSize"),
+ REGISTER_NAME(ACPOFIExtendedFeaturesBlocks, "Blocks"),
+ REGISTER_NAME(ACPOFIExtendedFeaturesCalls, "Calls"),
+ REGISTER_NAME(ACPOFIExtendedFeaturesIsLocal, "IsLocal"),
+ REGISTER_NAME(ACPOFIExtendedFeaturesIsLinkOnceODR, "IsLinkOnceODR"),
+ REGISTER_NAME(ACPOFIExtendedFeaturesIsLinkOnce, "IsLinkOnce"),
+ REGISTER_NAME(ACPOFIExtendedFeaturesLoops, "Loops"),
+ REGISTER_NAME(ACPOFIExtendedFeaturesMaxLoopDepth, "MaxLoopDepth"),
+ REGISTER_NAME(ACPOFIExtendedFeaturesMaxDomTreeLevel, "MaxDomTreeLevel"),
+ REGISTER_NAME(ACPOFIExtendedFeaturesPtrArgs, "PtrArgs"),
+ REGISTER_NAME(ACPOFIExtendedFeaturesPtrCallee, "PtrCallee"),
+ REGISTER_NAME(ACPOFIExtendedFeaturesCallReturnPtr, "CallReturnPtr"),
+ REGISTER_NAME(ACPOFIExtendedFeaturesConditionalBranch,
+ "ConditionalBranch"),
+ REGISTER_NAME(ACPOFIExtendedFeaturesCBwithArg, "CBwithArg"),
+ REGISTER_NAME(ACPOFIExtendedFeaturesCallerHeight, "CallerHeight"),
+ REGISTER_NAME(ACPOFIExtendedFeaturesCallUsage, "CallUsage"),
+ REGISTER_NAME(ACPOFIExtendedFeaturesIsRecursive, "IsRecursive"),
+ REGISTER_NAME(ACPOFIExtendedFeaturesNumCallsiteInLoop,
+ "NumCallsiteInLoop"),
+ REGISTER_NAME(ACPOFIExtendedFeaturesNumOfCallUsesInLoop,
+ "NumOfCallUsesInLoop"),
+ REGISTER_NAME(ACPOFIExtendedFeaturesEntryBlockFreq, "EntryBlockFreq"),
+ REGISTER_NAME(ACPOFIExtendedFeaturesMaxCallsiteBlockFreq,
+ "MaxCallsiteBlockFreq"),
+ REGISTER_NAME(ACPOFIExtendedFeaturesInstructionPerBlock,
+ "InstructionPerBlock"),
+ REGISTER_NAME(ACPOFIExtendedFeaturesSuccessorPerBlock,
+ "SuccessorPerBlock"),
+ REGISTER_NAME(ACPOFIExtendedFeaturesAvgVecInstr, "AvgVecInstr"),
+ REGISTER_NAME(ACPOFIExtendedFeaturesAvgNestedLoopLevel,
+ "AvgNestedLoopLevel"),
+ REGISTER_NAME(ACPOFIExtendedFeaturesInstrPerLoop, "InstrPerLoop"),
+ REGISTER_NAME(ACPOFIExtendedFeaturesBlockWithMultipleSuccecorsPerLoop,
+ "BlockWithMultipleSuccecorsPerLoop"),
+ REGISTER_NAME(CallerBlockFreq, "block_freq"),
+ REGISTER_NAME(CallSiteHeight, "callsite_height"),
+ REGISTER_NAME(ConstantParam, "nr_ctant_params"),
+ REGISTER_NAME(CostEstimate, "cost_estimate"),
+ REGISTER_NAME(LoopLevel, "loop_level"),
+ REGISTER_NAME(MandatoryKind, "mandatory_kind"),
+ REGISTER_NAME(MandatoryOnly, "mandatory_only"),
+ REGISTER_NAME(OptCode, "opt_code"),
+ REGISTER_NAME(IsIndirectCall, "is_indirect"),
+ REGISTER_NAME(IsInInnerLoop, "is_in_inner_loop"),
+ REGISTER_NAME(IsMustTailCall, "is_must_tail"),
+ REGISTER_NAME(IsTailCall, "is_tail"),
+ REGISTER_NAME(NumOfFeatures,"num_features"),
+ };
+#undef REGISTER_NAME
+
+#define REGISTER_SCOPE(INDEX_NAME, NAME) \
+ { \
+ ACPOCollectFeatures::FeatureIndex::INDEX_NAME, \
+ ACPOCollectFeatures::Scope::NAME \
+ }
+const std::unordered_map<ACPOCollectFeatures::FeatureIndex,
+ ACPOCollectFeatures::Scope>
+ ACPOCollectFeatures::FeatureIndexToScope{
+ REGISTER_SCOPE(SROASavings, CallSite),
+ REGISTER_SCOPE(SROALosses, CallSite),
+ REGISTER_SCOPE(LoadElimination, CallSite),
+ REGISTER_SCOPE(CallPenalty, CallSite),
+ REGISTER_SCOPE(CallArgumentSetup, CallSite),
+ REGISTER_SCOPE(LoadRelativeIntrinsic, CallSite),
+ REGISTER_SCOPE(LoweredCallArgSetup, CallSite),
+ REGISTER_SCOPE(IndirectCallPenalty, CallSite),
+ REGISTER_SCOPE(JumpTablePenalty, CallSite),
+ REGISTER_SCOPE(CaseClusterPenalty, CallSite),
+ REGISTER_SCOPE(SwitchPenalty, CallSite),
+ REGISTER_SCOPE(UnsimplifiedCommonInstructions, CallSite),
+ REGISTER_SCOPE(NumLoops, CallSite),
+ REGISTER_SCOPE(DeadBlocks, CallSite),
+ REGISTER_SCOPE(SimplifiedInstructions, CallSite),
+ REGISTER_SCOPE(ConstantArgs, CallSite),
+ REGISTER_SCOPE(ConstantOffsetPtrArgs, CallSite),
+ REGISTER_SCOPE(CallSiteCost, CallSite),
+ REGISTER_SCOPE(ColdCcPenalty, CallSite),
+ REGISTER_SCOPE(LastCallToStaticBonus, CallSite),
+ REGISTER_SCOPE(IsMultipleBlocks, CallSite),
+ REGISTER_SCOPE(NestedInlines, CallSite),
+ REGISTER_SCOPE(NestedInlineCostEstimate, CallSite),
+ REGISTER_SCOPE(Threshold, CallSite),
+ REGISTER_SCOPE(BasicBlockCount, Function),
+ REGISTER_SCOPE(BlocksReachedFromConditionalInstruction, Function),
+ REGISTER_SCOPE(Uses, Function),
+ REGISTER_SCOPE(EdgeCount, Module),
+ REGISTER_SCOPE(NodeCount, Module),
+ REGISTER_SCOPE(ColdCallSite, CallSite),
+ REGISTER_SCOPE(HotCallSite, CallSite),
+ REGISTER_SCOPE(ACPOFIExtendedFeaturesInitialSize, Function),
+ REGISTER_SCOPE(ACPOFIExtendedFeaturesBlocks, Function),
+ REGISTER_SCOPE(ACPOFIExtendedFeaturesCalls, Function),
+ REGISTER_SCOPE(ACPOFIExtendedFeaturesIsLocal, Function),
+ REGISTER_SCOPE(ACPOFIExtendedFeaturesIsLinkOnceODR, Function),
+ REGISTER_SCOPE(ACPOFIExtendedFeaturesIsLinkOnce, Function),
+ REGISTER_SCOPE(ACPOFIExtendedFeaturesLoops, Function),
+ REGISTER_SCOPE(ACPOFIExtendedFeaturesMaxLoopDepth, Function),
+ REGISTER_SCOPE(ACPOFIExtendedFeaturesMaxDomTreeLevel, Function),
+ REGISTER_SCOPE(ACPOFIExtendedFeaturesPtrArgs, Function),
+ REGISTER_SCOPE(ACPOFIExtendedFeaturesPtrCallee, Function),
+ REGISTER_SCOPE(ACPOFIExtendedFeaturesCallReturnPtr, Function),
+ REGISTER_SCOPE(ACPOFIExtendedFeaturesConditionalBranch, Function),
+ REGISTER_SCOPE(ACPOFIExtendedFeaturesCBwithArg, Function),
+ REGISTER_SCOPE(ACPOFIExtendedFeaturesCallerHeight, Function),
+ REGISTER_SCOPE(ACPOFIExtendedFeaturesCallUsage, Function),
+ REGISTER_SCOPE(ACPOFIExtendedFeaturesIsRecursive, Function),
+ REGISTER_SCOPE(ACPOFIExtendedFeaturesNumCallsiteInLoop, Function),
+ REGISTER_SCOPE(ACPOFIExtendedFeaturesNumOfCallUsesInLoop, Function),
+ REGISTER_SCOPE(ACPOFIExtendedFeaturesEntryBlockFreq, Function),
+ REGISTER_SCOPE(ACPOFIExtendedFeaturesMaxCallsiteBlockFreq, Function),
+ REGISTER_SCOPE(ACPOFIExtendedFeaturesInstructionPerBlock, Function),
+ REGISTER_SCOPE(ACPOFIExtendedFeaturesSuccessorPerBlock, Function),
+ REGISTER_SCOPE(ACPOFIExtendedFeaturesAvgVecInstr, Function),
+ REGISTER_SCOPE(ACPOFIExtendedFeaturesAvgNestedLoopLevel, Function),
+ REGISTER_SCOPE(ACPOFIExtendedFeaturesInstrPerLoop, Function),
+ REGISTER_SCOPE(ACPOFIExtendedFeaturesBlockWithMultipleSuccecorsPerLoop,
+ Function),
+ REGISTER_SCOPE(CallerBlockFreq, CallSite),
+ REGISTER_SCOPE(CallSiteHeight, CallSite),
+ REGISTER_SCOPE(ConstantParam, CallSite),
+ REGISTER_SCOPE(CostEstimate, CallSite),
+ REGISTER_SCOPE(LoopLevel, CallSite),
+ REGISTER_SCOPE(MandatoryKind, CallSite),
+ REGISTER_SCOPE(MandatoryOnly, CallSite),
+ REGISTER_SCOPE(OptCode, CallSite),
+ REGISTER_SCOPE(IsIndirectCall, CallSite),
+ REGISTER_SCOPE(IsInInnerLoop, CallSite),
+ REGISTER_SCOPE(IsMustTailCall, CallSite),
+ REGISTER_SCOPE(IsTailCall, CallSite),
+ };
+#undef REGISTER_SCOPE
+
+#define REGISTER_GROUP(INDEX_NAME, NAME) \
+ { \
+ ACPOCollectFeatures::FeatureIndex::INDEX_NAME, \
+ ACPOCollectFeatures::GroupID::NAME \
+ }
+const std::unordered_map<ACPOCollectFeatures::FeatureIndex,
+ ACPOCollectFeatures::GroupID>
+ ACPOCollectFeatures::FeatureIndexToGroup{
+ REGISTER_GROUP(SROASavings, InlineCostFeatureGroup),
+ REGISTER_GROUP(SROALosses, InlineCostFeatureGroup),
+ REGISTER_GROUP(LoadElimination, InlineCostFeatureGroup),
+ REGISTER_GROUP(CallPenalty, InlineCostFeatureGroup),
+ REGISTER_GROUP(CallArgumentSetup, InlineCostFeatureGroup),
+ REGISTER_GROUP(LoadRelativeIntrinsic, InlineCostFeatureGroup),
+ REGISTER_GROUP(LoweredCallArgSetup, InlineCostFeatureGroup),
+ REGISTER_GROUP(IndirectCallPenalty, InlineCostFeatureGroup),
+ REGISTER_GROUP(JumpTablePenalty, InlineCostFeatureGroup),
+ REGISTER_GROUP(CaseClusterPenalty, InlineCostFeatureGroup),
+ REGISTER_GROUP(SwitchPenalty, InlineCostFeatureGroup),
+ REGISTER_GROUP(UnsimplifiedCommonInstructions, InlineCostFeatureGroup),
+ REGISTER_GROUP(NumLoops, InlineCostFeatureGroup),
+ REGISTER_GROUP(DeadBlocks, InlineCostFeatureGroup),
+ REGISTER_GROUP(SimplifiedInstructions, InlineCostFeatureGroup),
+ REGISTER_GROUP(ConstantArgs, InlineCostFeatureGroup),
+ REGISTER_GROUP(ConstantOffsetPtrArgs, InlineCostFeatureGroup),
+ REGISTER_GROUP(CallSiteCost, InlineCostFeatureGroup),
+ REGISTER_GROUP(ColdCcPenalty, InlineCostFeatureGroup),
+ REGISTER_GROUP(LastCallToStaticBonus, InlineCostFeatureGroup),
+ REGISTER_GROUP(IsMultipleBlocks, InlineCostFeatureGroup),
+ REGISTER_GROUP(NestedInlines, InlineCostFeatureGroup),
+ REGISTER_GROUP(NestedInlineCostEstimate, InlineCostFeatureGroup),
+ REGISTER_GROUP(Threshold, InlineCostFeatureGroup),
+ REGISTER_GROUP(BasicBlockCount, FPIRelated),
+ REGISTER_GROUP(BlocksReachedFromConditionalInstruction, FPIRelated),
+ REGISTER_GROUP(Uses, FPIRelated),
+ REGISTER_GROUP(EdgeCount, EdgeNodeCount),
+ REGISTER_GROUP(NodeCount, EdgeNodeCount),
+ REGISTER_GROUP(ColdCallSite, HotColdCallSite),
+ REGISTER_GROUP(HotCallSite, HotColdCallSite),
+ REGISTER_GROUP(ACPOFIExtendedFeaturesInitialSize,
+ ACPOFIExtendedFeatures),
+ REGISTER_GROUP(ACPOFIExtendedFeaturesBlocks, ACPOFIExtendedFeatures),
+ REGISTER_GROUP(ACPOFIExtendedFeaturesCalls, ACPOFIExtendedFeatures),
+ REGISTER_GROUP(ACPOFIExtendedFeaturesIsLocal, ACPOFIExtendedFeatures),
+ REGISTER_GROUP(ACPOFIExtendedFeaturesIsLinkOnceODR,
+ ACPOFIExtendedFeatures),
+ REGISTER_GROUP(ACPOFIExtendedFeaturesIsLinkOnce,
+ ACPOFIExtendedFeatures),
+ REGISTER_GROUP(ACPOFIExtendedFeaturesLoops, ACPOFIExtendedFeatures),
+ REGISTER_GROUP(ACPOFIExtendedFeaturesMaxLoopDepth,
+ ACPOFIExtendedFeatures),
+ REGISTER_GROUP(ACPOFIExtendedFeaturesMaxDomTreeLevel,
+ ACPOFIExtendedFeatures),
+ REGISTER_GROUP(ACPOFIExtendedFeaturesPtrArgs, ACPOFIExtendedFeatures),
+ REGISTER_GROUP(ACPOFIExtendedFeaturesPtrCallee, ACPOFIExtendedFeatures),
+ REGISTER_GROUP(ACPOFIExtendedFeaturesCallReturnPtr,
+ ACPOFIExtendedFeatures),
+ REGISTER_GROUP(ACPOFIExtendedFeaturesConditionalBranch,
+ ACPOFIExtendedFeatures),
+ REGISTER_GROUP(ACPOFIExtendedFeaturesCBwithArg, ACPOFIExtendedFeatures),
+ REGISTER_GROUP(ACPOFIExtendedFeaturesCallerHeight,
+ ACPOFIExtendedFeatures),
+ REGISTER_GROUP(ACPOFIExtendedFeaturesCallUsage, ACPOFIExtendedFeatures),
+ REGISTER_GROUP(ACPOFIExtendedFeaturesIsRecursive,
+ ACPOFIExtendedFeatures),
+ REGISTER_GROUP(ACPOFIExtendedFeaturesNumCallsiteInLoop,
+ ACPOFIExtendedFeatures),
+ REGISTER_GROUP(ACPOFIExtendedFeaturesNumOfCallUsesInLoop,
+ ACPOFIExtendedFeatures),
+ REGISTER_GROUP(ACPOFIExtendedFeaturesEntryBlockFreq,
+ ACPOFIExtendedFeatures),
+ REGISTER_GROUP(ACPOFIExtendedFeaturesMaxCallsiteBlockFreq,
+ ACPOFIExtendedFeatures),
+ REGISTER_GROUP(ACPOFIExtendedFeaturesInstructionPerBlock,
+ ACPOFIExtendedFeatures),
+ REGISTER_GROUP(ACPOFIExtendedFeaturesSuccessorPerBlock,
+ ACPOFIExtendedFeatures),
+ REGISTER_GROUP(ACPOFIExtendedFeaturesAvgVecInstr,
+ ACPOFIExtendedFeatures),
+ REGISTER_GROUP(ACPOFIExtendedFeaturesAvgNestedLoopLevel,
+ ACPOFIExtendedFeatures),
+ REGISTER_GROUP(ACPOFIExtendedFeaturesInstrPerLoop,
+ ACPOFIExtendedFeatures),
+ REGISTER_GROUP(ACPOFIExtendedFeaturesBlockWithMultipleSuccecorsPerLoop,
+ ACPOFIExtendedFeatures),
+ };
+#undef REGISTER_GROUP
+
+// Given a map that may not be one-to-one, returns the inverse mapping.
+// EX: Input: A -> 1, B -> 1
+// Output: 1 -> A, 1 -> B
+template <class K, class V>
+static std::multimap<K, V> inverseMap(std::unordered_map<V, K> Map) {
+ std::multimap<K, V> InverseMap;
+ for (const auto &It : Map) {
+ InverseMap.insert(std::pair<K, V>(It.second, It.first));
+ }
+ return InverseMap;
+}
+
+const std::multimap<ACPOCollectFeatures::GroupID,
+ ACPOCollectFeatures::FeatureIndex>
+ ACPOCollectFeatures::GroupToFeatureIndices{
+ inverseMap<ACPOCollectFeatures::GroupID,
+ ACPOCollectFeatures::FeatureIndex>(FeatureIndexToGroup)};
+
+const std::multimap<ACPOCollectFeatures::Scope,
+ ACPOCollectFeatures::FeatureIndex>
+ ACPOCollectFeatures::ScopeToFeatureIndices{
+ inverseMap<ACPOCollectFeatures::Scope,
+ ACPOCollectFeatures::FeatureIndex>(FeatureIndexToScope)};
+
+#define REGISTER_FUNCTION(INDEX_NAME, NAME) \
+ { ACPOCollectFeatures::FeatureIndex::INDEX_NAME, NAME }
+const std::unordered_map<ACPOCollectFeatures::FeatureIndex,
+ ACPOCollectFeatures::CalculateFeatureFunction>
+ ACPOCollectFeatures::CalculateFeatureMap{
+ REGISTER_FUNCTION(SROASavings, calculateInlineCostFeatures),
+ REGISTER_FUNCTION(SROALosses, calculateInlineCostFeatures),
+ REGISTER_FUNCTION(LoadElimination, calculateInlineCostFeatures),
+ REGISTER_FUNCTION(CallPenalty, calculateInlineCostFeatures),
+ REGISTER_FUNCTION(CallArgumentSetup, calculateInlineCostFeatures),
+ REGISTER_FUNCTION(LoadRelativeIntrinsic, calculateInlineCostFeatures),
+ REGISTER_FUNCTION(LoweredCallArgSetup, calculateInlineCostFeatures),
+ REGISTER_FUNCTION(IndirectCallPenalty, calculateInlineCostFeatures),
+ REGISTER_FUNCTION(JumpTablePenalty, calculateInlineCostFeatures),
+ REGISTER_FUNCTION(CaseClusterPenalty, calculateInlineCostFeatures),
+ REGISTER_FUNCTION(SwitchPenalty, calculateInlineCostFeatures),
+ REGISTER_FUNCTION(UnsimplifiedCommonInstructions,
+ calculateInlineCostFeatures),
+ REGISTER_FUNCTION(NumLoops, calculateInlineCostFeatures),
+ REGISTER_FUNCTION(DeadBlocks, calculateInlineCostFeatures),
+ REGISTER_FUNCTION(SimplifiedInstructions, calculateInlineCostFeatures),
+ REGISTER_FUNCTION(ConstantArgs, calculateInlineCostFeatures),
+ REGISTER_FUNCTION(ConstantOffsetPtrArgs, calculateInlineCostFeatures),
+ REGISTER_FUNCTION(CallSiteCost, calculateInlineCostFeatures),
+ REGISTER_FUNCTION(ColdCcPenalty, calculateInlineCostFeatures),
+ REGISTER_FUNCTION(LastCallToStaticBonus, calculateInlineCostFeatures),
+ REGISTER_FUNCTION(IsMultipleBlocks, calculateInlineCostFeatures),
+ REGISTER_FUNCTION(NestedInlines, calculateInlineCostFeatures),
+ REGISTER_FUNCTION(NestedInlineCostEstimate,
+ calculateInlineCostFeatures),
+ REGISTER_FUNCTION(Threshold, calculateInlineCostFeatures),
+ REGISTER_FUNCTION(BasicBlockCount, calculateFPIRelated),
+ REGISTER_FUNCTION(BlocksReachedFromConditionalInstruction,
+ calculateFPIRelated),
+ REGISTER_FUNCTION(Uses, calculateFPIRelated),
+ REGISTER_FUNCTION(EdgeCount, calculateEdgeNodeCount),
+ REGISTER_FUNCTION(NodeCount, calculateEdgeNodeCount),
+ REGISTER_FUNCTION(ColdCallSite, calculateHotColdCallSite),
+ REGISTER_FUNCTION(HotCallSite, calculateHotColdCallSite),
+ REGISTER_FUNCTION(ACPOFIExtendedFeaturesInitialSize,
+ calculateACPOFIExtendedFeaturesFeatures),
+ REGISTER_FUNCTION(ACPOFIExtendedFeaturesBlocks,
+ calculateACPOFIExtendedFeaturesFeatures),
+ REGISTER_FUNCTION(ACPOFIExtendedFeaturesCalls,
+ calculateACPOFIExtendedFeaturesFeatures),
+ REGISTER_FUNCTION(ACPOFIExtendedFeaturesIsLocal,
+ calculateACPOFIExtendedFeaturesFeatures),
+ REGISTER_FUNCTION(ACPOFIExtendedFeaturesIsLinkOnceODR,
+ calculateACPOFIExtendedFeaturesFeatures),
+ REGISTER_FUNCTION(ACPOFIExtendedFeaturesIsLinkOnce,
+ calculateACPOFIExtendedFeaturesFeatures),
+ REGISTER_FUNCTION(ACPOFIExtendedFeaturesLoops,
+ calculateACPOFIExtendedFeaturesFeatures),
+ REGISTER_FUNCTION(ACPOFIExtendedFeaturesMaxLoopDepth,
+ calculateACPOFIExtendedFeaturesFeatures),
+ REGISTER_FUNCTION(ACPOFIExtendedFeaturesMaxDomTreeLevel,
+ calculateACPOFIExtendedFeaturesFeatures),
+ REGISTER_FUNCTION(ACPOFIExtendedFeaturesPtrArgs,
+ calculateACPOFIExtendedFeaturesFeatures),
+ REGISTER_FUNCTION(ACPOFIExtendedFeaturesPtrCallee,
+ calculateACPOFIExtendedFeaturesFeatures),
+ REGISTER_FUNCTION(ACPOFIExtendedFeaturesCallReturnPtr,
+ calculateACPOFIExtendedFeaturesFeatures),
+ REGISTER_FUNCTION(ACPOFIExtendedFeaturesConditionalBranch,
+ calculateACPOFIExtendedFeaturesFeatures),
+ REGISTER_FUNCTION(ACPOFIExtendedFeaturesCBwithArg,
+ calculateACPOFIExtendedFeaturesFeatures),
+ REGISTER_FUNCTION(ACPOFIExtendedFeaturesCallerHeight,
+ calculateACPOFIExtendedFeaturesFeatures),
+ REGISTER_FUNCTION(ACPOFIExtendedFeaturesCallUsage,
+ calculateACPOFIExtendedFeaturesFeatures),
+ REGISTER_FUNCTION(ACPOFIExtendedFeaturesIsRecursive,
+ calculateACPOFIExtendedFeaturesFeatures),
+ REGISTER_FUNCTION(ACPOFIExtendedFeaturesNumCallsiteInLoop,
+ calculateACPOFIExtendedFeaturesFeatures),
+ REGISTER_FUNCTION(ACPOFIExtendedFeaturesNumOfCallUsesInLoop,
+ calculateACPOFIExtendedFeaturesFeatures),
+ REGISTER_FUNCTION(ACPOFIExtendedFeaturesEntryBlockFreq,
+ calculateACPOFIExtendedFeaturesFeatures),
+ REGISTER_FUNCTION(ACPOFIExtendedFeaturesMaxCallsiteBlockFreq,
+ calculateACPOFIExtendedFeaturesFeatures),
+ REGISTER_FUNCTION(ACPOFIExtendedFeaturesInstructionPerBlock,
+ calculateACPOFIExtendedFeaturesFeatures),
+ REGISTER_FUNCTION(ACPOFIExtendedFeaturesSuccessorPerBlock,
+ calculateACPOFIExtendedFeaturesFeatures),
+ REGISTER_FUNCTION(ACPOFIExtendedFeaturesAvgVecInstr,
+ calculateACPOFIExtendedFeaturesFeatures),
+ REGISTER_FUNCTION(ACPOFIExtendedFeaturesAvgNestedLoopLevel,
+ calculateACPOFIExtendedFeaturesFeatures),
+ REGISTER_FUNCTION(ACPOFIExtendedFeaturesInstrPerLoop,
+ calculateACPOFIExtendedFeaturesFeatures),
+ REGISTER_FUNCTION(
+ ACPOFIExtendedFeaturesBlockWithMultipleSuccecorsPerLoop,
+ calculateACPOFIExtendedFeaturesFeatures),
+ REGISTER_FUNCTION(CallerBlockFreq, calculateCallerBlockFreq),
+ REGISTER_FUNCTION(CallSiteHeight, calculateCallSiteHeight),
+ REGISTER_FUNCTION(ConstantParam, calculateConstantParam),
+ REGISTER_FUNCTION(CostEstimate, calculateCostEstimate),
+ REGISTER_FUNCTION(LoopLevel, calculateLoopLevel),
+ REGISTER_FUNCTION(MandatoryKind, calculateMandatoryKind),
+ REGISTER_FUNCTION(MandatoryOnly, calculateMandatoryOnly),
+ REGISTER_FUNCTION(OptCode, calculateOptCode),
+ REGISTER_FUNCTION(IsIndirectCall, calculateIsIndirectCall),
+ REGISTER_FUNCTION(IsInInnerLoop, calculateIsInInnerLoop),
+ REGISTER_FUNCTION(IsMustTailCall, calculateIsMustTailCall),
+ REGISTER_FUNCTION(IsTailCall, calculateIsTailCall),
+ };
+#undef REGISTER_FUNCTION
+
+std::map<const Function *, unsigned> ACPOCollectFeatures::FunctionLevels{};
+
+ACPOCollectFeatures::ACPOCollectFeatures() {}
+
+ACPOCollectFeatures::ACPOCollectFeatures(
+ ACPOCollectFeatures::FeatureInfo GlobalInfo)
+ : GlobalFeatureInfo(GlobalInfo) {
+ assert(GlobalFeatureInfo.Idx == FeatureIndex::NumOfFeatures &&
+ "When setting glboal FeatureInfo the Idx should always be "
+ "NumOfFeatures");
+}
+
+ACPOCollectFeatures::~ACPOCollectFeatures() {}
+
+void ACPOCollectFeatures::setFeatureValue(ACPOCollectFeatures::FeatureIndex Idx,
+ std::string Val) {
+ FeatureToValue[Idx] = Val;
+}
+
+void ACPOCollectFeatures::setFeatureInfo(
+ ACPOCollectFeatures::FeatureIndex Idx,
+ ACPOCollectFeatures::FeatureInfo Info) {
+ assert(
+ (Info.Idx == ACPOCollectFeatures::FeatureIndex::NumOfFeatures ||
+ Info.Idx == Idx || getFeatureGroup(Info.Idx) == getFeatureGroup(Idx)) &&
+ "When setting FeatureToInfo map the key and value pair should both refer "
+ "to the same Feature or the FeatureInfo.Idx should be NumOfFeatures.");
+ FeatureToInfo[Idx] = Info;
+}
+
+void ACPOCollectFeatures::setFeatureValueAndInfo(
+ ACPOCollectFeatures::FeatureIndex Idx,
+ ACPOCollectFeatures::FeatureInfo Info, std::string Val) {
+ setFeatureValue(Idx, Val);
+ setFeatureInfo(Idx, Info);
+}
+
+void ACPOCollectFeatures::setGlobalFeatureInfo(
+ ACPOCollectFeatures::FeatureInfo &Info) {
+ assert(Info.Idx == FeatureIndex::NumOfFeatures &&
+ "When setting glboal FeatureInfo the Idx should always be "
+ "NumOfFeatures");
+ GlobalFeatureInfo = Info;
+}
+
+std::string
+ACPOCollectFeatures::getFeature(ACPOCollectFeatures::FeatureIndex Idx) const {
+ assert(registeredFeature(Idx) && "Feature not registered");
+ return FeatureToValue.find(Idx)->second;
+}
+
+std::string
+ACPOCollectFeatures::getFeatureName(ACPOCollectFeatures::FeatureIndex Idx) {
+ return FeatureIndexToName.find(Idx)->second;
+}
+
+ACPOCollectFeatures::GroupID
+ACPOCollectFeatures::getFeatureGroup(ACPOCollectFeatures::FeatureIndex Idx) {
+ return FeatureIndexToGroup.find(Idx)->second;
+}
+
+ACPOCollectFeatures::Scope
+ACPOCollectFeatures::getFeatureScope(ACPOCollectFeatures::FeatureIndex Idx) {
+ return FeatureIndexToScope.find(Idx)->second;
+}
+
+std::set<ACPOCollectFeatures::FeatureIndex>
+ACPOCollectFeatures::getGroupFeatures(ACPOCollectFeatures::GroupID Group) {
+ std::set<ACPOCollectFeatures::FeatureIndex> FeatureIndices;
+ auto Range = GroupToFeatureIndices.equal_range(Group);
+ for (auto It = Range.first; It != Range.second; ++It) {
+ FeatureIndices.insert(It->second);
+ }
+ return FeatureIndices;
+}
+
+std::set<ACPOCollectFeatures::FeatureIndex>
+ACPOCollectFeatures::getScopeFeatures(ACPOCollectFeatures::Scope S) {
+ std::set<ACPOCollectFeatures::FeatureIndex> FeatureIndices;
+ auto Range = ScopeToFeatureIndices.equal_range(S);
+ for (auto It = Range.first; It != Range.second; ++It) {
+ FeatureIndices.insert(It->second);
+ }
+ return FeatureIndices;
+}
+
+bool ACPOCollectFeatures::containsFeature(
+ ACPOCollectFeatures::FeatureIndex Idx) {
+ return FeatureToValue.count(Idx) > 0;
+}
+
+bool ACPOCollectFeatures::containsFeature(
+ ACPOCollectFeatures::GroupID GroupID) {
+ for (auto FeatureIdx : getGroupFeatures(GroupID)) {
+ if (!containsFeature(FeatureIdx))
+ return false;
+ }
+ return true;
+}
+
+void ACPOCollectFeatures::clearFeatureValueMap() { FeatureToValue.clear(); }
+
+bool ACPOCollectFeatures::registeredFeature(
+ ACPOCollectFeatures::FeatureIndex Idx) const {
+ return FeatureToValue.find(Idx) != FeatureToValue.end();
+}
+
+void calculateFPIRelated(ACPOCollectFeatures &ACF,
+ const ACPOCollectFeatures::FeatureInfo &Info) {
+ assert(Info.Idx == ACPOCollectFeatures::FeatureIndex::NumOfFeatures ||
+ Info.Idx == ACPOCollectFeatures::FeatureIndex::BasicBlockCount);
+
+ auto *FAM = Info.Managers.FAM;
+ auto *F = Info.SI.F;
+
+ assert(F && FAM && "Function or FAM is nullptr");
+
+ auto &FPI = FAM->getResult<FunctionPropertiesAnalysis>(*F);
+
+ ACF.setFeatureValueAndInfo(ACPOCollectFeatures::FeatureIndex::BasicBlockCount,
+ Info, std::to_string(FPI.BasicBlockCount));
+ ACF.setFeatureValueAndInfo(
+ ACPOCollectFeatures::FeatureIndex::
+ BlocksReachedFromConditionalInstruction,
+ Info, std::to_string(FPI.BlocksReachedFromConditionalInstruction));
+ ACF.setFeatureValueAndInfo(ACPOCollectFeatures::FeatureIndex::Uses, Info,
+ std::to_string(FPI.Uses));
+}
+
+void calculateCallerBlockFreq(ACPOCollectFeatures &ACF,
+ const ACPOCollectFeatures::FeatureInfo &Info) {
+ assert(Info.Idx == ACPOCollectFeatures::FeatureIndex::NumOfFeatures ||
+ Info.Idx == ACPOCollectFeatures::FeatureIndex::CallerBlockFreq);
+
+ auto *CB = Info.SI.CB;
+ auto *FAM = Info.Managers.FAM;
+
+ assert(CB && FAM && "CallSite or FAM is nullptr");
+
+ Function *F = CB->getCaller();
+ BasicBlock *BB = CB->getParent();
+ BlockFrequencyInfo &BFI = FAM->getResult<BlockFrequencyAnalysis>(*F);
+
+ uint64_t CallerBlockFreq = BFI.getBlockFreq(BB).getFrequency();
+ // The model uses a signed 64-bit value, so clamp to avoid integer overflow.
+ if (CallerBlockFreq >= std::numeric_limits<int64_t>::max()) {
+ CallerBlockFreq = std::numeric_limits<int64_t>::max() - 1;
+ }
+
+ ACF.setFeatureValueAndInfo(ACPOCollectFeatures::FeatureIndex::CallerBlockFreq,
+ Info, std::to_string(CallerBlockFreq));
+}
+
+void calculateCallSiteHeight(ACPOCollectFeatures &ACF,
+ const ACPOCollectFeatures::FeatureInfo &Info) {
+ assert(Info.Idx == ACPOCollectFeatures::FeatureIndex::NumOfFeatures ||
+ Info.Idx == ACPOCollectFeatures::FeatureIndex::CallSiteHeight);
+
+ // Check if we already calculated the values.
+ if (ACF.containsFeature(ACPOCollectFeatures::FeatureIndex::CallSiteHeight))
+ return;
+
+ auto *CB = Info.SI.CB;
+ auto *IA = Info.OI.IA;
+
+ assert(CB && IA && "CallSite or IA is nullptr");
+
+ if (IA) {
+ ACF.setFeatureValueAndInfo(ACPOCollectFeatures::FeatureIndex::CallSiteHeight,
+ Info, std::to_string(IA->getCallSiteHeight(CB)));
+ return;
+ }
+ LLVM_DEBUG(dbgs() << "IA was nullptr & callsite height is not set!" << "\n");
+}
+
+void calculateConstantParam(ACPOCollectFeatures &ACF,
+ const ACPOCollectFeatures::FeatureInfo &Info) {
+ assert(Info.Idx == ACPOCollectFeatures::FeatureIndex::NumOfFeatures ||
+ Info.Idx == ACPOCollectFeatures::FeatureIndex::ConstantParam);
+
+ // Check if we already calculated the values.
+ if (ACF.containsFeature(ACPOCollectFeatures::FeatureIndex::ConstantParam))
+ return;
+
+ auto *CB = Info.SI.CB;
+ assert(CB && "CallSite is nullptr");
+
+ size_t NrCtantParams = 0;
+ for (auto I = CB->arg_begin(), E = CB->arg_end(); I != E; ++I) {
+ NrCtantParams += (isa<Constant>(*I));
+ }
+
+ ACF.setFeatureValueAndInfo(ACPOCollectFeatures::FeatureIndex::ConstantParam,
+ Info, std::to_string(NrCtantParams));
+}
+
+void calculateCostEstimate(ACPOCollectFeatures &ACF,
+ const ACPOCollectFeatures::FeatureInfo &Info) {
+ assert(Info.Idx == ACPOCollectFeatures::FeatureIndex::NumOfFeatures ||
+ Info.Idx == ACPOCollectFeatures::FeatureIndex::CostEstimate);
+
+ // Check if we already calculated the values.
+ if (ACF.containsFeature(ACPOCollectFeatures::FeatureIndex::CostEstimate))
+ return;
+
+ auto *CB = Info.SI.CB;
+ auto *FAM = Info.Managers.FAM;
+
+ assert(CB && FAM && "CallBase or FAM is nullptr");
+
+ auto &Callee = *CB->getCalledFunction();
+ auto &TIR = FAM->getResult<TargetIRAnalysis>(Callee);
+
+ auto GetAssumptionCache = [&](Function &F) -> AssumptionCache & {
+ return FAM->getResult<AssumptionAnalysis>(F);
+ };
+
+ int CostEstimate = 0;
+ auto IsCallSiteInlinable =
+ llvm::getInliningCostEstimate(*CB, TIR, GetAssumptionCache);
+ if (IsCallSiteInlinable)
+ CostEstimate = *IsCallSiteInlinable;
+
+ ACF.setFeatureValueAndInfo(ACPOCollectFeatures::FeatureIndex::CostEstimate,
+ Info, std::to_string(CostEstimate));
+}
+
+int64_t getLocalCalls(Function &F, FunctionAnalysisManager &FAM) {
+ return FAM.getResult<FunctionPropertiesAnalysis>(F)
+ .DirectCallsToDefinedFunctions;
+}
+
+void calculateEdgeNodeCount(ACPOCollectFeatures &ACF,
+ const ACPOCollectFeatures::FeatureInfo &Info) {
+ assert(Info.Idx == ACPOCollectFeatures::FeatureIndex::NumOfFeatures ||
+ ACPOCollectFeatures::getFeatureGroup(Info.Idx) ==
+ ACPOCollectFeatures::GroupID::EdgeNodeCount);
+
+ // Check if we already calculated the values.
+ if (ACF.containsFeature(ACPOCollectFeatures::GroupID::EdgeNodeCount))
+ return;
+
+ auto *M = Info.SI.M;
+ auto *FAM = Info.Managers.FAM;
+
+ assert(M && FAM && "Module or FAM is nullptr");
+
+ int NodeCount = 0;
+ int EdgeCount = 0;
+ for (auto &F : *M)
+ if (!F.isDeclaration()) {
+ ++NodeCount;
+ EdgeCount += getLocalCalls(F, *FAM);
+ }
+
+ std::string EdgeCountStr = std::to_string(EdgeCount);
+ std::string NodeCountStr = std::to_string(NodeCount);
+ ACF.setFeatureValueAndInfo(ACPOCollectFeatures::FeatureIndex::EdgeCount, Info,
+ EdgeCountStr);
+ ACF.setFeatureValueAndInfo(ACPOCollectFeatures::FeatureIndex::NodeCount, Info,
+ NodeCountStr);
+}
+
+void calculateHotColdCallSite(ACPOCollectFeatures &ACF,
+ const ACPOCollectFeatures::FeatureInfo &Info) {
+ assert(Info.Idx == ACPOCollectFeatures::FeatureIndex::NumOfFeatures ||
+ ACPOCollectFeatures::getFeatureGroup(Info.Idx) ==
+ ACPOCollectFeatures::GroupID::HotColdCallSite);
+
+ // Check if we already calculated the values.
+ if (ACF.containsFeature(ACPOCollectFeatures::GroupID::HotColdCallSite))
+ return;
+
+ auto *CB = Info.SI.CB;
+ auto *FAM = Info.Managers.FAM;
+
+ assert(CB && FAM && "Module or FAM is nullptr");
+
+ auto &Caller = *CB->getCaller();
+ auto GetBFI = [&](Function &F) -> BlockFrequencyInfo & {
+ return FAM->getResult<BlockFrequencyAnalysis>(F);
+ };
+
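+ // A call site is treated as cold when its block frequency is below 2% of
+ // the caller's entry frequency (ColdProb = 2/100), and as hot when it is at
+ // least 60x the caller's entry frequency.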
+ BlockFrequencyInfo &CallerBFI = GetBFI(Caller);
+ const BranchProbability ColdProb(2, 100);
+ auto *CallSiteBB = CB->getParent();
+ auto CallSiteFreq = CallerBFI.getBlockFreq(CallSiteBB);
+ auto CallerEntryFreq =
+ CallerBFI.getBlockFreq(&(CB->getCaller()->getEntryBlock()));
+ bool ColdCallSite = CallSiteFreq < CallerEntryFreq * ColdProb;
+ auto CallerEntryFreqHot = CallerBFI.getEntryFreq();
+ bool HotCallSite = (CallSiteFreq.getFrequency() >= CallerEntryFreqHot * 60);
+
+ ACF.setFeatureValueAndInfo(ACPOCollectFeatures::FeatureIndex::ColdCallSite,
+ Info, std::to_string(ColdCallSite));
+ ACF.setFeatureValueAndInfo(ACPOCollectFeatures::FeatureIndex::HotCallSite,
+ Info, std::to_string(HotCallSite));
+}
+
+void calculateLoopLevel(ACPOCollectFeatures &ACF,
+ const ACPOCollectFeatures::FeatureInfo &Info) {
+ assert(Info.Idx == ACPOCollectFeatures::FeatureIndex::NumOfFeatures ||
+ Info.Idx == ACPOCollectFeatures::FeatureIndex::LoopLevel);
+
+ // Check if we already calculated the values.
+ if (ACF.containsFeature(ACPOCollectFeatures::FeatureIndex::LoopLevel))
+ return;
+
+ auto *CB = Info.SI.CB;
+ auto *FAM = Info.Managers.FAM;
+
+ assert(CB && FAM && "CallBase or FAM is nullptr");
+
+ Function *F = CB->getCaller();
+ BasicBlock *BB = CB->getParent();
+ LoopInfo &LI = FAM->getResult<LoopAnalysis>(*F);
+
+ ACF.setFeatureValueAndInfo(ACPOCollectFeatures::FeatureIndex::LoopLevel, Info,
+ std::to_string(LI.getLoopDepth(BB)));
+}
+
+InlineAdvisor::MandatoryInliningKind
+ACPOCollectFeatures::getMandatoryKind(CallBase &CB,
+ FunctionAnalysisManager &FAM,
+ OptimizationRemarkEmitter &ORE) {
+ return InlineAdvisor::getMandatoryKind(CB, FAM, ORE);
+}
+
+void calculateMandatoryKind(ACPOCollectFeatures &ACF,
+ const ACPOCollectFeatures::FeatureInfo &Info) {
+ assert(Info.Idx == ACPOCollectFeatures::FeatureIndex::NumOfFeatures ||
+ Info.Idx == ACPOCollectFeatures::FeatureIndex::MandatoryKind);
+
+ // Check if we already calculated the values.
+ if (ACF.containsFeature(ACPOCollectFeatures::FeatureIndex::MandatoryKind))
+ return;
+
+ auto *CB = Info.SI.CB;
+ auto *FAM = Info.Managers.FAM;
+
+ assert(CB && FAM && "CallBase or FAM is nullptr");
+
+ auto &Caller = *CB->getCaller();
+ auto &ORE = FAM->getResult<OptimizationRemarkEmitterAnalysis>(Caller);
+ auto MandatoryKind = ACPOCollectFeatures::getMandatoryKind(*CB, *FAM, ORE);
+
+ ACF.setFeatureValueAndInfo(ACPOCollectFeatures::FeatureIndex::MandatoryKind,
+ Info, std::to_string((int)MandatoryKind));
+}
+
+void calculateMandatoryOnly(ACPOCollectFeatures &ACF,
+ const ACPOCollectFeatures::FeatureInfo &Info) {
+ assert(Info.Idx == ACPOCollectFeatures::FeatureIndex::NumOfFeatures ||
+ Info.Idx == ACPOCollectFeatures::FeatureIndex::MandatoryOnly);
+
+ // Check if we already calculated the values.
+ if (ACF.containsFeature(ACPOCollectFeatures::FeatureIndex::MandatoryOnly))
+ return;
+
+ ACF.setFeatureValueAndInfo(ACPOCollectFeatures::FeatureIndex::MandatoryOnly,
+ Info, std::to_string((int)Info.OI.MandatoryOnly));
+}
+
+void calculateOptCode(ACPOCollectFeatures &ACF,
+ const ACPOCollectFeatures::FeatureInfo &Info) {
+ assert(Info.Idx == ACPOCollectFeatures::FeatureIndex::NumOfFeatures ||
+ Info.Idx == ACPOCollectFeatures::FeatureIndex::OptCode);
+
+ // Check if we already calculated the values.
+ if (ACF.containsFeature(ACPOCollectFeatures::FeatureIndex::OptCode))
+ return;
+
+ auto *CB = Info.SI.CB;
+
+ assert(CB && "CallBase is nullptr");
+
+ std::string OptCode = std::to_string(CB->getOpcode());
+ ACF.setFeatureValueAndInfo(ACPOCollectFeatures::FeatureIndex::OptCode, Info,
+ OptCode);
+}
+
+void calculateInlineCostFeatures(ACPOCollectFeatures &ACF,
+ const ACPOCollectFeatures::FeatureInfo &Info) {
+ assert(Info.Idx == ACPOCollectFeatures::FeatureIndex::NumOfFeatures ||
+ (ACPOCollectFeatures::getFeatureGroup(Info.Idx) ==
+ ACPOCollectFeatures::GroupID::InlineCostFeatureGroup));
+
+ // Check if we already calculated the values.
+ if (ACF.containsFeature(ACPOCollectFeatures::GroupID::InlineCostFeatureGroup))
+ return;
+
+ auto *CB = Info.SI.CB;
+ auto *FAM = Info.Managers.FAM;
+
+ assert(CB && FAM && "CallBase or FAM is nullptr");
+
+ auto &Callee = *CB->getCalledFunction();
+ auto &TIR = FAM->getResult<TargetIRAnalysis>(Callee);
+
+ auto GetAssumptionCache = [&](Function &F) -> AssumptionCache & {
+ return FAM->getResult<AssumptionAnalysis>(F);
+ };
+
+ const auto CostFeaturesOpt =
+ getInliningCostFeatures(*CB, TIR, GetAssumptionCache);
+
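+ // Feature indices in InlineCostFeatureGroup are contiguous, starting right
+ // after InlineCostFeatureGroupBegin; TmpIdx maps each index onto the
+ // corresponding slot of the cost-feature vector (or 0 when no estimate was
+ // produced).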
+ for (auto Idx =
+ ACPOCollectFeatures::FeatureIndex::InlineCostFeatureGroupBegin + 1;
+ Idx != ACPOCollectFeatures::FeatureIndex::InlineCostFeatureGroupEnd;
+ ++Idx) {
+ size_t TmpIdx =
+ static_cast<size_t>(Idx) -
+ static_cast<size_t>(
+ ACPOCollectFeatures::FeatureIndex::InlineCostFeatureGroupBegin) -
+ 1;
+ ACF.setFeatureValueAndInfo(
+ Idx, Info,
+ std::to_string(CostFeaturesOpt ? CostFeaturesOpt.value()[TmpIdx] : 0));
+ }
+}
+
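+// The three helpers below maintain a per-function cache of extended features.
+// A cached value is reused only while the analysis result (TTI, DominatorTree
+// or LoopInfo) that produced it is still the same object; updateCachedFF
+// refreshes the cache after the features are recomputed.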
+static void
+checkValidFFCache(Function &F,
+ struct ACPOFIExtendedFeatures::FunctionFeatures &FF,
+ DominatorTree &Tree, TargetTransformInfo &TTI, LoopInfo &LI,
+ bool &ValidSize, bool &ValidLoop, bool &ValidTree) {
+ std::optional<size_t> SizeCache = ACPOFIModel::getCachedSize(
+ &F, ACPOFIExtendedFeatures::NamedFeatureIndex::InitialSize);
+ auto TTIAnalysisCache = ACPOFIModel::getTTICachedAnalysis(&F);
+ if (SizeCache && TTIAnalysisCache == &TTI) {
+ ValidSize = true;
+ }
+
+ std::optional<size_t> MaxDomTreeLevelCache = ACPOFIModel::getCachedSize(
+ &F, ACPOFIExtendedFeatures::NamedFeatureIndex::MaxDomTreeLevel);
+ auto DomCache = ACPOFIModel::getDomCachedAnalysis(&F);
+ if (MaxDomTreeLevelCache && DomCache == &Tree) {
+ ValidTree = true;
+ }
+
+ std::optional<size_t> LoopNumCache = ACPOFIModel::getCachedSize(
+ &F, ACPOFIExtendedFeatures::NamedFeatureIndex::Loops);
+ auto LIAnalysisCache = ACPOFIModel::getLICachedAnalysis(&F);
+ if (LoopNumCache && LIAnalysisCache == &LI) {
+ ValidLoop = true;
+ }
+}
+
+static void getCachedFF(Function &F,
+ struct ACPOFIExtendedFeatures::FunctionFeatures &FF,
+ DominatorTree &Tree, TargetTransformInfo &TTI,
+ LoopInfo &LI) {
+ std::optional<size_t> SizeCache = ACPOFIModel::getCachedSize(
+ &F, ACPOFIExtendedFeatures::NamedFeatureIndex::InitialSize);
+ auto TTIAnalysisCache = ACPOFIModel::getTTICachedAnalysis(&F);
+ if (SizeCache && TTIAnalysisCache == &TTI) {
+ FF[ACPOFIExtendedFeatures::NamedFeatureIndex::InitialSize] =
+ SizeCache.value();
+ }
+
+ std::optional<size_t> MaxDomTreeLevelCache = ACPOFIModel::getCachedSize(
+ &F, ACPOFIExtendedFeatures::NamedFeatureIndex::MaxDomTreeLevel);
+ auto DomCache = ACPOFIModel::getDomCachedAnalysis(&F);
+ if (MaxDomTreeLevelCache && DomCache == &Tree) {
+ FF[ACPOFIExtendedFeatures::NamedFeatureIndex::MaxDomTreeLevel] =
+ MaxDomTreeLevelCache.value();
+ }
+
+ std::optional<size_t> LoopNumCache = ACPOFIModel::getCachedSize(
+ &F, ACPOFIExtendedFeatures::NamedFeatureIndex::Loops);
+ auto LIAnalysisCache = ACPOFIModel::getLICachedAnalysis(&F);
+ if (LoopNumCache && LIAnalysisCache == &LI) {
+ FF[ACPOFIExtendedFeatures::NamedFeatureIndex::Loops] = LoopNumCache.value();
+ FF[ACPOFIExtendedFeatures::NamedFeatureIndex::MaxLoopDepth] =
+ ACPOFIModel::getCachedSize(
+ &F, ACPOFIExtendedFeatures::NamedFeatureIndex::MaxLoopDepth)
+ .value();
+ if (LoopNumCache.value() != 0) {
+ FF[ACPOFIExtendedFeatures::NamedFloatFeatureIndex::InstrPerLoop] =
+ ACPOFIModel::getCachedFloat(
+ &F, ACPOFIExtendedFeatures::NamedFloatFeatureIndex::InstrPerLoop)
+ .value();
+ FF[ACPOFIExtendedFeatures::NamedFloatFeatureIndex::
+ BlockWithMultipleSuccecorsPerLoop] =
+ ACPOFIModel::getCachedFloat(
+ &F, ACPOFIExtendedFeatures::NamedFloatFeatureIndex::
+ BlockWithMultipleSuccecorsPerLoop)
+ .value();
+ FF[ACPOFIExtendedFeatures::NamedFloatFeatureIndex::AvgNestedLoopLevel] =
+ ACPOFIModel::getCachedFloat(
+ &F, ACPOFIExtendedFeatures::NamedFloatFeatureIndex::
+ AvgNestedLoopLevel)
+ .value();
+ }
+ }
+}
+
+static void updateCachedFF(Function &F,
+ struct ACPOFIExtendedFeatures::FunctionFeatures &FF,
+ DominatorTree &Tree, TargetTransformInfo &TTI,
+ LoopInfo &LI) {
+ ACPOFIModel::insertSizeCache(
+ &F, ACPOFIExtendedFeatures::NamedFeatureIndex::InitialSize,
+ FF[ACPOFIExtendedFeatures::NamedFeatureIndex::InitialSize]);
+ ACPOFIModel::insertAnalysisCache(&F, &TTI);
+ ACPOFIModel::insertSizeCache(
+ &F, ACPOFIExtendedFeatures::NamedFeatureIndex::MaxDomTreeLevel,
+ FF[ACPOFIExtendedFeatures::NamedFeatureIndex::MaxDomTreeLevel]);
+ ACPOFIModel::insertAnalysisCache(&F, &Tree);
+ ACPOFIModel::insertSizeCache(
+ &F, ACPOFIExtendedFeatures::NamedFeatureIndex::Loops,
+ FF[ACPOFIExtendedFeatures::NamedFeatureIndex::Loops]);
+ ACPOFIModel::insertSizeCache(
+ &F, ACPOFIExtendedFeatures::NamedFeatureIndex::MaxLoopDepth,
+ FF[ACPOFIExtendedFeatures::NamedFeatureIndex::MaxLoopDepth]);
+ ACPOFIModel::insertFloatCache(
+ &F, ACPOFIExtendedFeatures::NamedFloatFeatureIndex::InstrPerLoop,
+ FF[ACPOFIExtendedFeatures::NamedFloatFeatureIndex::InstrPerLoop]);
+ ACPOFIModel::insertFloatCache(
+ &F,
+ ACPOFIExtendedFeatures::NamedFloatFeatureIndex::
+ BlockWithMultipleSuccecorsPerLoop,
+ FF[ACPOFIExtendedFeatures::NamedFloatFeatureIndex::
+ BlockWithMultipleSuccecorsPerLoop]);
+ ACPOFIModel::insertFloatCache(
+ &F, ACPOFIExtendedFeatures::NamedFloatFeatureIndex::AvgNestedLoopLevel,
+ FF[ACPOFIExtendedFeatures::NamedFloatFeatureIndex::AvgNestedLoopLevel]);
+ ACPOFIModel::insertAnalysisCache(&F, &LI);
+}
+
+void calculateACPOFIExtendedFeaturesFeatures(
+ ACPOCollectFeatures &ACF, const ACPOCollectFeatures::FeatureInfo &Info) {
+ assert(Info.Idx == ACPOCollectFeatures::FeatureIndex::NumOfFeatures ||
+ ACPOCollectFeatures::getFeatureGroup(Info.Idx) ==
+ ACPOCollectFeatures::GroupID::ACPOFIExtendedFeatures);
+
+ // Check if we already calculated the values.
+ if (ACF.containsFeature(ACPOCollectFeatures::GroupID::ACPOFIExtendedFeatures))
+ return;
+
+ auto F = Info.SI.F;
+ auto *FAM = Info.Managers.FAM;
+
+ assert(F && FAM && "F or FAM is nullptr");
+
+ struct ACPOFIExtendedFeatures::FunctionFeatures FF;
+ auto &DomTree = FAM->getResult<DominatorTreeAnalysis>(*F);
+ auto &TTI = FAM->getResult<TargetIRAnalysis>(*F);
+ auto &LI = FAM->getResult<LoopAnalysis>(*F);
+ bool ValidSize = false;
+ bool ValidLoop = false;
+ bool ValidTree = false;
+ checkValidFFCache(*F, FF, DomTree, TTI, LI, ValidSize, ValidLoop, ValidTree);
+ FF = ACPOFIExtendedFeatures::getFunctionFeatures(
+ *F, DomTree, TTI, LI, FAM, ValidSize, ValidLoop, ValidTree);
+ getCachedFF(*F, FF, DomTree, TTI, LI);
+ updateCachedFF(*F, FF, DomTree, TTI, LI);
+
+ for (auto Idx = ACPOCollectFeatures::FeatureIndex::
+ ACPOFIExtendedFeaturesNamedFeatureBegin +
+ 1;
+ Idx !=
+ ACPOCollectFeatures::FeatureIndex::ACPOFIExtendedFeaturesNamedFeatureEnd;
+ ++Idx) {
+ size_t TmpIdx =
+ static_cast<size_t>(Idx) -
+ static_cast<size_t>(ACPOCollectFeatures::FeatureIndex::
+ ACPOFIExtendedFeaturesNamedFeatureBegin) -
+ 1;
+ ACF.setFeatureValueAndInfo(Idx, Info,
+ std::to_string(FF.NamedFeatures[TmpIdx]));
+ }
+ for (auto Idx = ACPOCollectFeatures::FeatureIndex::
+ ACPOFIExtendedFeaturesFloatFeatureBegin +
+ 1;
+ Idx !=
+ ACPOCollectFeatures::FeatureIndex::ACPOFIExtendedFeaturesFloatFeatureEnd;
+ ++Idx) {
+ size_t TmpIdx =
+ static_cast<size_t>(Idx) -
+ static_cast<size_t>(ACPOCollectFeatures::FeatureIndex::
+ ACPOFIExtendedFeaturesFloatFeatureBegin) -
+ 1;
+ ACF.setFeatureValueAndInfo(Idx, Info,
+ std::to_string(FF.NamedFloatFeatures[TmpIdx]));
+ }
+}
+
+void calculateIsIndirectCall(ACPOCollectFeatures &ACF,
+ const ACPOCollectFeatures::FeatureInfo &Info) {
+ assert(Info.Idx == ACPOCollectFeatures::FeatureIndex::NumOfFeatures ||
+ Info.Idx == ACPOCollectFeatures::FeatureIndex::IsIndirectCall);
+
+ // Check if we already calculated the values.
+ if (ACF.containsFeature(ACPOCollectFeatures::FeatureIndex::IsIndirectCall))
+ return;
+
+ auto *CB = Info.SI.CB;
+
+ assert(CB && "CallBase is nullptr");
+
+ ACF.setFeatureValueAndInfo(ACPOCollectFeatures::FeatureIndex::IsIndirectCall,
+ Info, std::to_string(CB->isIndirectCall()));
+}
+
+void calculateIsInInnerLoop(ACPOCollectFeatures &ACF,
+ const ACPOCollectFeatures::FeatureInfo &Info) {
+ assert(Info.Idx == ACPOCollectFeatures::FeatureIndex::NumOfFeatures ||
+ Info.Idx == ACPOCollectFeatures::FeatureIndex::IsInInnerLoop);
+
+ // Check if we already calculated the values.
+ if (ACF.containsFeature(ACPOCollectFeatures::FeatureIndex::IsInInnerLoop))
+ return;
+
+ auto *CB = Info.SI.CB;
+ auto *FAM = Info.Managers.FAM;
+
+ assert(CB && FAM && "CallBase or FAM is nullptr");
+
+ auto &Caller = *CB->getCaller();
+ auto &CallerLI = FAM->getResult<LoopAnalysis>(Caller);
+
+ // Check whether CB is contained in an innermost loop of its caller.
+ bool CallSiteInInnerLoop = false;
+ for (auto &L : CallerLI) {
+ if (L->isInnermost() && L->contains(CB))
+ CallSiteInInnerLoop = true;
+ }
+
+ ACF.setFeatureValueAndInfo(ACPOCollectFeatures::FeatureIndex::IsInInnerLoop,
+ Info, std::to_string(CallSiteInInnerLoop));
+}
+
+void calculateIsMustTailCall(ACPOCollectFeatures &ACF,
+ const ACPOCollectFeatures::FeatureInfo &Info) {
+ assert(Info.Idx == ACPOCollectFeatures::FeatureIndex::NumOfFeatures ||
+ Info.Idx == ACPOCollectFeatures::FeatureIndex::IsMustTailCall);
+
+ // Check if we already calculated the values.
+ if (ACF.containsFeature(ACPOCollectFeatures::FeatureIndex::IsMustTailCall))
+ return;
+
+ auto *CB = Info.SI.CB;
+
+ assert(CB && "CallBase is nullptr");
+
+ ACF.setFeatureValueAndInfo(ACPOCollectFeatures::FeatureIndex::IsMustTailCall,
+ Info, std::to_string(CB->isMustTailCall()));
+}
+
+void calculateIsTailCall(ACPOCollectFeatures &ACF,
+ const ACPOCollectFeatures::FeatureInfo &Info) {
+ assert(Info.Idx == ACPOCollectFeatures::FeatureIndex::NumOfFeatures ||
+ Info.Idx == ACPOCollectFeatures::FeatureIndex::IsTailCall);
+
+ // Check if we already calculated the values.
+ if (ACF.containsFeature(ACPOCollectFeatures::FeatureIndex::IsTailCall))
+ return;
+
+ auto *CB = Info.SI.CB;
+
+ assert(CB && "CallBase is nullptr");
+
+ ACF.setFeatureValueAndInfo(ACPOCollectFeatures::FeatureIndex::IsTailCall,
+ Info, std::to_string(CB->isTailCall()));
+}
+
+ACPOCollectFeatures::FeatureValueMap ACPOCollectFeatures::getFeaturesPair(
+ ACPOCollectFeatures::FeaturesInfo FeatureInfoVec) {
+ clearFeatureValueMap();
+ for (auto &FeatureInfo : FeatureInfoVec) {
+ auto It = CalculateFeatureMap.find(FeatureInfo.Idx);
+ assert(It != CalculateFeatureMap.end() &&
+ "Could not find the corresponding function to calculate feature");
+ auto CalculateFunction = It->second;
+ CalculateFunction(*this, FeatureInfo);
+ LLVM_DEBUG(dbgs() << "ACPO Feature " << getFeatureName(FeatureInfo.Idx)
+ << ": " << FeatureToValue[FeatureInfo.Idx] << "\n");
+ }
+
+ return FeatureToValue;
+}
+
+ACPOCollectFeatures::FeatureValueMap
+ACPOCollectFeatures::getFeaturesPair(ACPOCollectFeatures::Scopes ScopeVec) {
+ clearFeatureValueMap();
+ for (auto Scope : ScopeVec) {
+ for (auto FeatureIdx : getScopeFeatures(Scope)) {
+ auto It = CalculateFeatureMap.find(FeatureIdx);
+ assert(It != CalculateFeatureMap.end() &&
+ "Could not find the corresponding function to calculate feature");
+ auto CalculateFunction = It->second;
+ CalculateFunction(*this, GlobalFeatureInfo);
+ LLVM_DEBUG(dbgs() << "ACPO Feature " << getFeatureName(FeatureIdx)
+ << ": " << FeatureToValue[FeatureIdx] << "\n");
+ }
+ }
+
+ return FeatureToValue;
+}
+
+ACPOCollectFeatures::FeatureValueMap
+ACPOCollectFeatures::getFeaturesPair(ACPOCollectFeatures::GroupIDs GroupIDVec) {
+ clearFeatureValueMap();
+ for (auto GroupID : GroupIDVec) {
+ for (auto FeatureIdx : getGroupFeatures(GroupID)) {
+ auto It = CalculateFeatureMap.find(FeatureIdx);
+ assert(It != CalculateFeatureMap.end() &&
+ "Could not find the corresponding function to calculate feature");
+ auto CalculateFunction = It->second;
+ CalculateFunction(*this, GlobalFeatureInfo);
+ LLVM_DEBUG(dbgs() << "ACPO Feature " << getFeatureName(FeatureIdx)
+ << ": " << FeatureToValue[FeatureIdx] << "\n");
+ }
+ }
+
+ return FeatureToValue;
+}
+
+ACPOCollectFeatures::FeatureValueMap
+ACPOCollectFeatures::getFeaturesPair(ACPOCollectFeatures::FeatureIndex Beg,
+ ACPOCollectFeatures::FeatureIndex End) {
+ assert(Beg <= End);
+ for (auto Idx = Beg; Idx != End; ++Idx) {
+ auto It = CalculateFeatureMap.find(Idx);
+ assert(It != CalculateFeatureMap.end() &&
+ "Could not find the corresponding function to calculate feature");
+ auto CalculateFunction = It->second;
+ CalculateFunction(*this, GlobalFeatureInfo);
+ }
+
+ return FeatureToValue;
+}
+
+void ACPOCollectFeatures::clearFunctionLevel() { FunctionLevels.clear(); }
+
+void ACPOCollectFeatures::insertFunctionLevel(const Function *F, unsigned FL) {
+ FunctionLevels[F] = FL;
+}
+
+std::optional<unsigned>
+ACPOCollectFeatures::getFunctionLevel(const Function *F) {
+ auto It = FunctionLevels.find(F);
+ if (It == FunctionLevels.end()) {
+ return std::nullopt;
+ } else {
+ return It->second;
+ }
+}
+
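+// Arithmetic and increment operators on FeatureIndex let callers iterate
+// contiguous enum ranges (e.g. the GroupBegin + 1 .. GroupEnd loops above).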
+ACPOCollectFeatures::FeatureIndex operator+(ACPOCollectFeatures::FeatureIndex N,
+ int Counter) {
+ return static_cast<ACPOCollectFeatures::FeatureIndex>((int)N + Counter);
+}
+
+ACPOCollectFeatures::FeatureIndex operator-(ACPOCollectFeatures::FeatureIndex N,
+ int Counter) {
+ return static_cast<ACPOCollectFeatures::FeatureIndex>((int)N - Counter);
+}
+
+ACPOCollectFeatures::FeatureIndex &
+operator++(ACPOCollectFeatures::FeatureIndex &N) {
+ return N = static_cast<ACPOCollectFeatures::FeatureIndex>((int)N + 1);
+}
+
+ACPOCollectFeatures::FeatureIndex
+operator++(ACPOCollectFeatures::FeatureIndex &N, int) {
+ ACPOCollectFeatures::FeatureIndex Res = N;
+ ++N;
+ return Res;
+}
+
+} // namespace llvm
diff --git a/llvm/lib/Analysis/ACPOMLInterface.cpp b/llvm/lib/Analysis/ACPOMLInterface.cpp
new file mode 100644
index 000000000000..271dcfe7d851
--- /dev/null
+++ b/llvm/lib/Analysis/ACPOMLInterface.cpp
@@ -0,0 +1,1405 @@
+//===- ACPOMLInterface.cpp - AI-Enabled Continuous Program Optimization ---===//
+//
+// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+// See https://llvm.org/LICENSE.txt for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+//
+//===----------------------------------------------------------------------===//
+//
+// This file implements an interface to the ML framework.
+//
+//===----------------------------------------------------------------------===//
+
+#include "llvm/Analysis/ACPOMLInterface.h"
+#include "llvm/Analysis/ACPOModelRunner.h"
+#include "llvm/Analysis/FIModelRunner.h"
+#include "llvm/Analysis/TensorSpec.h"
+#include "llvm/Support/Process.h"
+#include "llvm/Support/Program.h"
+#include "llvm/Support/raw_ostream.h"
+
+#include <ctime>
+#include <fstream>
+#include <sstream>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string>
+#include <vector>
+
+#ifdef _WIN32
+#include <Windows.h>
+#else
+#include <unistd.h>
+#endif
+
+using namespace llvm;
+
+#define DEBUG_TYPE "acpo"
+
+#define ACPO_ENV_VAR_DIR "ACPO_DIR"
+#define ACPO_ML_PYTHON_INTERFACE_PY "MLInterface.py"
+#define ACPO_PYTHON_EXECUTABLE "python"
+#define ACPO_PIPE_PREFIX "ACPO_Pipe"
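+// The command and response FIFOs are named ACPO_Pipe_CMD_<pid>_<time> and
+// ACPO_Pipe_RESP_<pid>_<time> under the ACPO directory; MLInterface.py creates
+// them, and the ACPOMLPythonInterface constructor below opens them once the
+// Python side is up.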
+
+#define RESPONSE_MODEL_LOADED "Model loaded"
+#define RESPONSE_ALREADY_IN_DICT "already in dict"
+#define RESPONSE_FEATURE_SET "Feature set"
+#define RESPONSE_FEATURES_INITIALIZED "Features initialized"
+#define RESPONSE_FEATURES_SET "Features set"
+#define RESPONSE_COMPLETED "Completed"
+#define RESPONSE_ACTIVE "Active"
+#define RESPONSE_ERROR "ERROR"
+
+// Static variables
+
+static std::shared_ptr<ACPOMLInterface> PersistentMLIF = nullptr;
+
+// Class definitions
+
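+// A Model keeps three lookups populated by registerFeature: feature name to
+// ID, ID to name, and ID to index into the model inputs; duplicate names or
+// IDs are rejected.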
+bool Model::registerFeature(std::string FeatureName, uint64_t FeatureID,
+ int Index) {
+ auto Find1 = NameToID.find(FeatureName);
+ if (Find1 != NameToID.end()) {
+ LLVM_DEBUG(dbgs() << "ERROR in registerFeature: Feature " << FeatureName
+ << " already exists\n");
+ return false;
+ }
+ NameToID.insert(std::make_pair(FeatureName, FeatureID));
+ IDToName.insert(std::make_pair(FeatureID, FeatureName));
+ auto Find2 = IDToIndex.find(FeatureID);
+ if (Find2 != IDToIndex.end()) {
+ LLVM_DEBUG(dbgs() << "ERROR in registerFeature: Feature with ID "
+ << FeatureID << " already exists\n");
+ return false;
+ }
+ IDToIndex.insert(std::make_pair(FeatureID, Index));
+ return true;
+}
+
+bool Model::registerInput(std::string InputName, std::string InputType) {
+ auto Find = InputMap.find(InputName);
+ if (Find != InputMap.end()) {
+ LLVM_DEBUG(dbgs() << "ERROR in registerInput: Input " << InputName
+ << " already exists\n");
+ return false;
+ }
+ InputMap.insert(std::make_pair(InputName, InputType));
+ return true;
+}
+
+bool Model::registerOutput(std::string OutputName, std::string OutputType) {
+ auto Find = OutputMap.find(OutputName);
+ if (Find != OutputMap.end()) {
+ LLVM_DEBUG(dbgs() << "ERROR in registerOutput: Output " << OutputName
+ << " already exists\n");
+ return false;
+ }
+ OutputMap.insert(std::make_pair(OutputName, OutputType));
+ return true;
+}
+
+int Model::getIndex(uint64_t FeatureID) const {
+ auto Find = IDToIndex.find(FeatureID);
+ assert(Find != IDToIndex.end());
+ return Find->second;
+}
+
+int Model::getIndex(std::string FeatureName) const {
+ auto Find = NameToID.find(FeatureName);
+ assert(Find != NameToID.end());
+ uint64_t ID = Find->second;
+ return getIndex(ID);
+}
+
+std::string Model::getName(uint64_t FeatureID) const {
+ auto Find = IDToName.find(FeatureID);
+ assert(Find != IDToName.end());
+ return Find->second;
+}
+
+bool Model::checkOutputExists(std::string OutputName) const {
+ return (OutputMap.find(OutputName) != OutputMap.end());
+}
+
+std::string Model::getInputType(std::string InputName) const {
+ auto Find = InputMap.find(InputName);
+ assert(Find != InputMap.end());
+ return Find->second;
+}
+
+std::string Model::getOutputType(std::string OutputName) const {
+ auto Find = OutputMap.find(OutputName);
+ assert(Find != OutputMap.end());
+ return Find->second;
+}
+
+ACPOMLPythonInterface::ACPOMLPythonInterface() : NextID{0} {
+ std::optional<std::string> Env = llvm::sys::Process::GetEnv(ACPO_ENV_VAR_DIR);
+ if (!Env || *Env == "") {
+ std::optional<std::string> LLVMDIROpt =
+ llvm::sys::Process::GetEnv("LLVM_DIR");
+ if (LLVMDIROpt) {
+ Env = *LLVMDIROpt + "/acpo/";
+ } else {
+ return;
+ }
+ }
+
+ int32_t PID = (int32_t) llvm::sys::Process::getProcessId();
+ std::string ExecPython = "/usr/bin/python3";
+ std::string PythonScript =
+ *Env + "/" + std::string(ACPO_ML_PYTHON_INTERFACE_PY);
+ std::string PIDStr = std::to_string(PID);
+ std::string TimeStr = std::to_string(time(nullptr));
+ std::string NameOut =
+ *Env + "/" + ACPO_PIPE_PREFIX + "_CMD_" + PIDStr + "_" + TimeStr;
+ std::string NameIn =
+ *Env + "/" + ACPO_PIPE_PREFIX + "_RESP_" + PIDStr + "_" + TimeStr;
+ StringRef Args[] = { ExecPython, PythonScript, NameOut, NameIn };
+
+ // Start a process and don't wait for it to finish. We want it running in
+ // tandem.
+ std::string ErrMsg;
+ SubProcess =
+ sys::ExecuteNoWait(ExecPython, Args, std::nullopt, {}, 0, &ErrMsg);
+ if (!SubProcess.Pid) {
+ // Print out error message if the process fails to start.
+ LLVM_DEBUG(dbgs() << ErrMsg << "\n");
+ return;
+ }
+ // Yield briefly so the Python process can set up its pipes.
+ const int PythonProcessStartupLatency = 100;
+ usleep(PythonProcessStartupLatency);
+
+ // Now link to named pipes created by the process we just started. Note that
+ // because the creation of this file as a pipe was done elsewhere, the
+ // interface here is simple.
+
+ // First check that the response pipe has been created by attempting to open
+ // it for reading. If that fails, sleep for 100us at a time to give the ML
+ // interface time to create the named pipes and open the response pipe for
+ // writing. Once that is done, the fopen call below will succeed.
+
+ // FIXME: Support library provides robust and portable APIs for opening files
+ // and creating input/output streams. Use them instead of calling libc
+ // functions.
+ PipeIn = fopen(NameIn.c_str(), "r");
+ if (PipeIn == nullptr) {
+ do {
+ usleep(100);
+ PipeIn = fopen(NameIn.c_str(), "r");
+ } while (PipeIn == nullptr);
+ }
+
+ // Once the response FIFO is created, then open the command FIFO for writing.
+ // This will complete the handshake with the MLInterface in Python.
+ PipeOut = fopen(NameOut.c_str(), "w");
+ // Now open named pipes to the new process.
+ setInitialized(true);
+}
+
+ACPOMLPythonInterface::~ACPOMLPythonInterface() {
+ if (SubProcess.Pid)
+ closeMLInterface();
+ if (PipeIn)
+ fclose(PipeIn);
+ if (PipeOut)
+ fclose(PipeOut);
+ if (SubProcess.Pid) {
+ // Wait up to 3 seconds for the MLInterface to exit, then kill it.
+ sys::Wait(SubProcess, 3);
+ SubProcess = sys::ProcessInfo{};
+ }
+ setInitialized(false);
+}
+
+uint64_t ACPOMLPythonInterface::assignID() {
+ NextID++;
+ return NextID - 1;
+}
+
+bool ACPOMLPythonInterface::loadModel(std::string ModelSpecFile) {
+ sendCommand("LoadModel " + ModelSpecFile);
+ std::string Response = getResponse();
+ std::vector<std::string> Tokens = tokenize(Response);
+ if (Tokens[0] != RESPONSE_MODEL_LOADED) {
+ return false;
+ }
+ if (Tokens[1] == RESPONSE_ALREADY_IN_DICT) {
+ LLVM_DEBUG(dbgs() << "loadModel: the model specified in " << ModelSpecFile
+ << " has already been loaded\n");
+ return true;
+ }
+ std::string ModelName = Tokens[1];
+ int NumFeatures = std::stoi(Tokens[2]);
+ LLVM_DEBUG(dbgs() << "Registering features: " << NumFeatures << "\n");
+ registerModel(ModelName, NumFeatures);
+ auto ModelPtr = ModelMap.find(ModelName)->second;
+ std::string FeatureName = "";
+ for (int I = 0; I < NumFeatures; I++) {
+ FeatureName = Tokens[I + 3];
+ if (!registerFeature(ModelName, FeatureName, I)) {
+ return false;
+ }
+ }
+ int OutputStart = 3 + NumFeatures;
+ int NumOutputs = std::stoi(Tokens[OutputStart]);
+ ModelPtr->setNumOutputs(NumOutputs);
+ OutputStart++;
+ std::string OutputName;
+ std::string OutputType;
+ for (int I = 0; I < NumOutputs; I++) {
+ std::istringstream IS(Tokens[OutputStart + I]);
+ IS >> OutputName >> OutputType;
+ if (!registerOutput(ModelName, OutputName, OutputType)) {
+ return false;
+ }
+ }
+ std::string Signature = Tokens[OutputStart + NumOutputs];
+ ModelPtr->setSignature(Signature);
+ return true;
+}
+
+bool ACPOMLPythonInterface::registerModel(std::string ModelName,
+ int NumFeatures) {
+ auto Find = ModelMap.find(ModelName);
+ if (Find != ModelMap.end()) {
+ LLVM_DEBUG(dbgs() << "registerModel: Model " << ModelName
+ << " already exists\n");
+ return false;
+ }
+ std::shared_ptr<Model> NewModel = std::make_shared<Model>(NumFeatures);
+ ModelMap.insert(std::make_pair(ModelName, NewModel));
+ return true;
+}
+
+bool ACPOMLPythonInterface::registerModel(std::string ModelName,
+ int NumFeatures, int NumOutputs) {
+ auto Find = ModelMap.find(ModelName);
+ if (Find != ModelMap.end()) {
+ LLVM_DEBUG(dbgs() << "registerModel: Model " << ModelName
+ << " already exists\n");
+ return false;
+ }
+ std::shared_ptr<Model> NewModel =
+ std::make_shared<Model>(NumFeatures, NumOutputs);
+ ModelMap.insert(std::make_pair(ModelName, NewModel));
+ return true;
+}
+
+bool ACPOMLPythonInterface::registerFeature(std::string ModelName,
+ std::string FeatureName,
+ int Index) {
+ auto Find = ModelMap.find(ModelName);
+ assert(Find != ModelMap.end());
+ if (Find == ModelMap.end()) {
+ LLVM_DEBUG(dbgs() << "ERROR in registerFeature: Model " << ModelName
+ << " has not been loaded\n");
+ return false;
+ }
+ uint64_t ID = assignID();
+ return Find->second->registerFeature(FeatureName, ID, Index);
+}
+
+bool ACPOMLPythonInterface::registerOutput(std::string ModelName,
+ std::string OutputName,
+ std::string OutputType) {
+ auto Find = ModelMap.find(ModelName);
+ if (Find == ModelMap.end()) {
+ LLVM_DEBUG(dbgs() << "ERROR in registerOutput: Model " << ModelName
+ << " has not been loaded\n");
+ return false;
+ }
+ return Find->second->registerOutput(OutputName, OutputType);
+}
+
+int ACPOMLPythonInterface::getNumLoadedModels() { return ModelMap.size(); }
+
+bool ACPOMLPythonInterface::defineInputIR(std::string Filename) {
+ return false;
+}
+
+bool ACPOMLPythonInterface::setCustomFeature(std::string ModelName,
+ uint64_t FeatureID,
+ int FeatureValue) {
+ auto Find = ModelMap.find(ModelName);
+ if (Find == ModelMap.end()) {
+ LLVM_DEBUG(dbgs() << "ERROR in setCustomFeature: Model " << ModelName
+ << " has not been loaded\n");
+ return false;
+ }
+ int Index = Find->second->getIndex(FeatureID);
+ sendCommand("SetCustomFeature " + std::to_string(Index) + " " +
+ std::to_string(FeatureValue));
+ std::string Response = getResponse();
+ return (Response.find(RESPONSE_FEATURE_SET) == 0);
+}
+
+bool ACPOMLPythonInterface::setCustomFeature(std::string ModelName,
+ uint64_t FeatureID,
+ int64_t FeatureValue) {
+ auto Find = ModelMap.find(ModelName);
+ if (Find == ModelMap.end()) {
+ LLVM_DEBUG(dbgs() << "ERROR in setCustomFeature: Model " << ModelName
+ << " has not been loaded\n");
+ return false;
+ }
+ int Index = Find->second->getIndex(FeatureID);
+ sendCommand("SetCustomFeature " + std::to_string(Index) + " " +
+ std::to_string(FeatureValue));
+ std::string Response = getResponse();
+ return (Response.find(RESPONSE_FEATURE_SET) == 0);
+}
+
+bool ACPOMLPythonInterface::setCustomFeature(std::string ModelName,
+ uint64_t FeatureID,
+ double FeatureValue) {
+ auto Find = ModelMap.find(ModelName);
+ if (Find == ModelMap.end()) {
+ LLVM_DEBUG(dbgs() << "ERROR in setCustomFeature: Model " << ModelName
+ << " has not been loaded\n");
+ return false;
+ }
+ int Index = Find->second->getIndex(FeatureID);
+ sendCommand("SetCustomFeature " + std::to_string(Index) + " " +
+ std::to_string(FeatureValue));
+ std::string Response = getResponse();
+ return (Response.find(RESPONSE_FEATURE_SET) == 0);
+}
+
+bool ACPOMLPythonInterface::setCustomFeature(std::string ModelName,
+ uint64_t FeatureID,
+ float FeatureValue) {
+ auto Find = ModelMap.find(ModelName);
+ if (Find == ModelMap.end()) {
+ LLVM_DEBUG(dbgs() << "ERROR in setCustomFeature: Model " << ModelName
+ << " has not been loaded\n");
+ return false;
+ }
+ int Index = Find->second->getIndex(FeatureID);
+ sendCommand("SetCustomFeature " + std::to_string(Index) + " " +
+ std::to_string(FeatureValue));
+ std::string Response = getResponse();
+ return (Response.find(RESPONSE_FEATURE_SET) == 0);
+}
+
+bool ACPOMLPythonInterface::setCustomFeature(std::string ModelName,
+ uint64_t FeatureID,
+ bool FeatureValue) {
+ auto Find = ModelMap.find(ModelName);
+ if (Find == ModelMap.end()) {
+ LLVM_DEBUG(dbgs() << "ERROR in setCustomFeature: Model " << ModelName
+ << " has not been loaded\n");
+ return false;
+ }
+ int Index = Find->second->getIndex(FeatureID);
+ std::string Command = "SetCustomFeature " + std::to_string(Index) + " ";
+ Command += FeatureValue ? "1" : "0";
+ sendCommand(Command);
+ std::string Response = getResponse();
+ return (Response.find(RESPONSE_FEATURE_SET) == 0);
+}
+
+bool ACPOMLPythonInterface::setCustomFeature(std::string ModelName,
+ std::string FeatureName,
+ int FeatureValue) {
+ auto Find = ModelMap.find(ModelName);
+ if (Find == ModelMap.end()) {
+ LLVM_DEBUG(dbgs() << "ERROR in setCustomFeature: Model " << ModelName
+ << " has not been loaded\n");
+ return false;
+ }
+ int Index = Find->second->getIndex(FeatureName);
+ sendCommand("SetCustomFeature " + std::to_string(Index) + " " +
+ std::to_string(FeatureValue));
+ std::string Response = getResponse();
+ return (Response.find(RESPONSE_FEATURE_SET) == 0);
+}
+
+bool ACPOMLPythonInterface::setCustomFeature(std::string ModelName,
+ std::string FeatureName,
+ int64_t FeatureValue) {
+ auto Find = ModelMap.find(ModelName);
+ if (Find == ModelMap.end()) {
+ LLVM_DEBUG(dbgs() << "ERROR in setCustomFeature: Model " << ModelName
+ << " has not been loaded\n");
+ return false;
+ }
+ int Index = Find->second->getIndex(FeatureName);
+ sendCommand("SetCustomFeature " + std::to_string(Index) + " " +
+ std::to_string(FeatureValue));
+ std::string Response = getResponse();
+ return (Response.find(RESPONSE_FEATURE_SET) == 0);
+}
+
+bool ACPOMLPythonInterface::setCustomFeature(std::string ModelName,
+ std::string FeatureName,
+ double FeatureValue) {
+ auto Find = ModelMap.find(ModelName);
+ if (Find == ModelMap.end()) {
+ LLVM_DEBUG(dbgs() << "ERROR in setCustomFeature: Model " << ModelName
+ << " has not been loaded\n");
+ return false;
+ }
+ int Index = Find->second->getIndex(FeatureName);
+ sendCommand("SetCustomFeature " + std::to_string(Index) + " " +
+ std::to_string(FeatureValue));
+ std::string Response = getResponse();
+ return (Response.find(RESPONSE_FEATURE_SET) == 0);
+}
+
+bool ACPOMLPythonInterface::setCustomFeature(std::string ModelName,
+ std::string FeatureName,
+ float FeatureValue) {
+ auto Find = ModelMap.find(ModelName);
+ if (Find == ModelMap.end()) {
+ LLVM_DEBUG(dbgs() << "ERROR in setCustomFeature: Model " << ModelName
+ << " has not been loaded\n");
+ return false;
+ }
+ int Index = Find->second->getIndex(FeatureName);
+ sendCommand("SetCustomFeature " + std::to_string(Index) + " " +
+ std::to_string(FeatureValue));
+ std::string Response = getResponse();
+ return (Response.find(RESPONSE_FEATURE_SET) == 0);
+}
+
+bool ACPOMLPythonInterface::setCustomFeature(std::string ModelName,
+ std::string FeatureName,
+ bool FeatureValue) {
+ auto Find = ModelMap.find(ModelName);
+ if (Find == ModelMap.end()) {
+ LLVM_DEBUG(dbgs() << "ERROR in setCustomFeature: Model " << ModelName
+ << " has not been loaded\n");
+ return false;
+ }
+ int Index = Find->second->getIndex(FeatureName);
+ std::string Command = "SetCustomFeature " + std::to_string(Index) + " ";
+ Command += FeatureValue ? "1" : "0";
+ sendCommand(Command);
+ std::string Response = getResponse();
+ return (Response.find(RESPONSE_FEATURE_SET) == 0);
+}
+
+bool ACPOMLPythonInterface::initializeFeatures(
+ std::string ModelName,
+ const std::vector<std::pair<uint64_t, std::string>> &FeatureValues) {
+ auto Find = ModelMap.find(ModelName);
+ if (Find == ModelMap.end()) {
+ LLVM_DEBUG(dbgs() << "ERROR in initializeFeatures: Model " << ModelName
+ << " has not been loaded\n");
+ return false;
+ }
+ if (FeatureValues.size() > Find->second->getNumFeatures()) {
+ LLVM_DEBUG(dbgs() << "ERROR in initializeFeatures: Invalid features\n");
+ return false;
+ }
+ CurrentlyActiveModel = ModelName;
+ std::string Command = "InitializeFeatures " + ModelName;
+ for (const auto &Feature : FeatureValues) {
+ uint64_t FeatureID = Feature.first;
+ std::string FeatureValue = Feature.second;
+ int Index = Find->second->getIndex(FeatureID);
+ Command += " " + std::to_string(Index) + " " + FeatureValue;
+ }
+ sendCommand(Command);
+ std::string Response = getResponse();
+ return (Response.find(RESPONSE_FEATURES_INITIALIZED) == 0);
+}
+
+bool ACPOMLPythonInterface::initializeFeatures(
+ std::string ModelName,
+ const std::vector<std::pair<std::string, std::string>> &FeatureValues) {
+ auto Find = ModelMap.find(ModelName);
+ if (Find == ModelMap.end()) {
+ LLVM_DEBUG(dbgs() << "ERROR in initializeFeatures: Model " << ModelName
+ << " has not been loaded\n");
+ return false;
+ }
+ if (FeatureValues.size() > Find->second->getNumFeatures()) {
+ LLVM_DEBUG(dbgs() << "ERROR in initializeFeatures: Invalid features\n");
+ return false;
+ }
+ CurrentlyActiveModel = ModelName;
+ std::string Command = "InitializeFeatures " + ModelName;
+ for (const auto &Feature : FeatureValues) {
+ std::string FeatureName = Feature.first;
+ std::string FeatureValue = Feature.second;
+ int Index = Find->second->getIndex(FeatureName);
+ Command += " " + std::to_string(Index) + " " + FeatureValue;
+ }
+ sendCommand(Command);
+ std::string Response = getResponse();
+ return (Response.find(RESPONSE_FEATURES_INITIALIZED) == 0);
+}
+
+bool ACPOMLPythonInterface::setCustomFeatures(
+ std::string ModelName,
+ const std::vector<std::pair<uint64_t, std::string>> &FeatureValues) {
+ if (ModelName != CurrentlyActiveModel) {
+ LLVM_DEBUG(dbgs() << "ERROR in setCustomFeatures: Model " << ModelName
+ << " has not been loaded or is not active\n");
+ return false;
+ }
+ auto Find = ModelMap.find(ModelName);
+ if (FeatureValues.size() > Find->second->getNumFeatures()) {
+ LLVM_DEBUG(dbgs() << "ERROR in setCustomFeatures: Invalid features\n");
+ return false;
+ }
+ std::string Command = "SetCustomFeatures";
+ for (const auto &Feature : FeatureValues) {
+ uint64_t FeatureID = Feature.first;
+ std::string FeatureValue = Feature.second;
+ int Index = Find->second->getIndex(FeatureID);
+ Command += " " + std::to_string(Index) + " " + FeatureValue;
+ }
+ sendCommand(Command);
+ std::string Response = getResponse();
+ return (Response.find(RESPONSE_FEATURES_SET) == 0);
+}
+
+bool ACPOMLPythonInterface::setCustomFeatures(
+ std::string ModelName,
+ const std::vector<std::pair<std::string, std::string>> &FeatureValues) {
+ if (ModelName != CurrentlyActiveModel) {
+ LLVM_DEBUG(dbgs() << "ERROR in setCustomFeatures: Model " << ModelName
+ << " has not been loaded or is not active\n");
+ return false;
+ }
+ auto Find = ModelMap.find(ModelName);
+ if (FeatureValues.size() > Find->second->getNumFeatures()) {
+ LLVM_DEBUG(dbgs() << "ERROR in setCustomFeatures: Invalid features\n");
+ return false;
+ }
+ std::string Command = "SetCustomFeatures";
+ for (const auto &Feature : FeatureValues) {
+ std::string FeatureName = Feature.first;
+ std::string FeatureValue = Feature.second;
+ int Index = Find->second->getIndex(FeatureName);
+ Command += " " + std::to_string(Index) + " " + FeatureValue;
+ }
+ sendCommand(Command);
+ std::string Response = getResponse();
+ return (Response.find(RESPONSE_FEATURES_SET) == 0);
+}
+
+bool ACPOMLPythonInterface::runModel(std::string ModelName) {
+ if (ModelName != CurrentlyActiveModel) {
+ LLVM_DEBUG(dbgs() << "ERROR in runModel: Model " << ModelName
+ << " is not active\n");
+ return false;
+ }
+ sendCommand("RunModel");
+ std::string Response = getResponse();
+ return (Response.find(RESPONSE_COMPLETED) == 0);
+}
+
+std::string ACPOMLPythonInterface::getOutputType(std::string ModelName,
+ std::string OutputName) {
+ auto Find = ModelMap.find(ModelName);
+ assert(Find != ModelMap.end());
+ return Find->second->getOutputType(OutputName);
+}
+
+int ACPOMLPythonInterface::getModelResultI(std::string OutputName) {
+ auto Find = ModelMap.find(CurrentlyActiveModel);
+ assert(Find->second->checkOutputExists(OutputName));
+ sendCommand("GetModelOutput " + OutputName);
+ std::string Response = getResponse();
+ std::vector<std::string> Tokens = tokenize(Response);
+ assert(Tokens.size() == 3);
+ assert(Tokens[0] == OutputName);
+ int Result = std::stoi(Tokens[2]);
+ return Result;
+}
+
+int64_t ACPOMLPythonInterface::getModelResultI64(std::string OutputName) {
+ auto Find = ModelMap.find(CurrentlyActiveModel);
+ assert(Find->second->checkOutputExists(OutputName));
+ sendCommand("GetModelOutput " + OutputName);
+ std::string Response = getResponse();
+ std::vector<std::string> Tokens = tokenize(Response);
+ assert(Tokens.size() == 3);
+ assert(Tokens[0] == OutputName);
+ int64_t Result = std::stol(Tokens[2]);
+ return Result;
+}
+
+float ACPOMLPythonInterface::getModelResultF(std::string OutputName) {
+ auto Find = ModelMap.find(CurrentlyActiveModel);
+ assert(Find->second->checkOutputExists(OutputName));
+ sendCommand("GetModelOutput " + OutputName);
+ std::string Response = getResponse();
+ std::vector<std::string> Tokens = tokenize(Response);
+ assert(Tokens.size() == 3);
+ assert(Tokens[0] == OutputName);
+ float Result = std::stof(Tokens[2]);
+ return Result;
+}
+
+double ACPOMLPythonInterface::getModelResultD(std::string OutputName) {
+ auto Find = ModelMap.find(CurrentlyActiveModel);
+ assert(Find->second->checkOutputExists(OutputName));
+ sendCommand("GetModelOutput " + OutputName);
+ std::string Response = getResponse();
+ std::vector<std::string> Tokens = tokenize(Response);
+ assert(Tokens.size() == 3);
+ assert(Tokens[0] == OutputName);
+ double Result = std::stod(Tokens[2]);
+ return Result;
+}
+
+bool ACPOMLPythonInterface::getModelResultB(std::string OutputName) {
+ auto Find = ModelMap.find(CurrentlyActiveModel);
+ assert(Find->second->checkOutputExists(OutputName));
+ sendCommand("GetModelOutput " + OutputName);
+ std::string Response = getResponse();
+ std::vector<std::string> Tokens = tokenize(Response);
+ assert(Tokens.size() == 3);
+ assert(Tokens[0] == OutputName);
+ return (Tokens[2] == "1");
+}
+
+int ACPOMLPythonInterface::getStatus() {
+ sendCommand("GetStatus");
+ std::string Response = getResponse();
+ return Response.find(RESPONSE_ACTIVE) == 0;
+}
+
+bool ACPOMLPythonInterface::releaseModel(std::string ModelName) {
+ sendCommand("ReleaseModel " + ModelName);
+ std::string Response = getResponse();
+ ModelMap.erase(ModelName);
+ CurrentlyActiveModel = "";
+ return true;
+}
+
+bool ACPOMLPythonInterface::closeMLInterface() {
+ sendCommand("CloseMLInterface");
+ std::string Response = getResponse();
+ return true;
+}
+
+void ACPOMLPythonInterface::sendCommand(const std::string &Command) {
+ fprintf(PipeOut,"%s\n", Command.c_str());
+ fflush(PipeOut);
+ usleep(1);
+}
+
+void ACPOMLPythonInterface::sendCommand(
+ const std::vector<std::string> &Features) {
+ for (auto I = Features.begin(); I != Features.end(); I++) {
+ fprintf(PipeOut,"%s\n", I->c_str());
+ fflush(PipeOut);
+ usleep(1);
+ }
+}
+
+std::string ACPOMLPythonInterface::getResponse() {
+ std::string Response = "";
+ char Letter = getc(PipeIn);
+ while (Letter != '\n') {
+ if (feof(PipeIn))
+ assert(false && "ACPO pipeline is closed unexpectively.");
+
+ Response += Letter;
+ Letter = getc(PipeIn);
+ }
+ Response += '\n';
+ if (Response.substr(0, 5) == RESPONSE_ERROR) {
+ LLVM_DEBUG(dbgs() << Response);
+ assert(false && "MLInterface reutrned error");
+ }
+ return Response;
+}
+
+std::vector<std::string>
+ACPOMLPythonInterface::tokenize(const std::string &Line) {
+ std::vector<std::string> Result;
+ std::string Temp = Line;
+ auto Loc = Temp.find(",");
+ while (Loc != std::string::npos) {
+ std::string Sub = Temp.substr(0, Loc);
+ Result.push_back(Sub);
+ Temp = Temp.substr(Loc + 1);
+ Loc = Temp.find(",");
+ }
+ if (Temp.length() > 0)
+ Result.push_back(Temp);
+
+ return Result;
+}
+
+std::shared_ptr<ACPOMLInterface> llvm::createPersistentPythonMLIF() {
+ if (PersistentMLIF == nullptr) {
+ PersistentMLIF = std::make_shared<ACPOMLPythonInterface>();
+
+ if (!PersistentMLIF->isInitialized())
+ PersistentMLIF = nullptr;
+ }
+ return PersistentMLIF;
+}
+
+ACPOMLCPPInterface::ACPOMLCPPInterface() { setInitialized(true); }
+
+ACPOMLCPPInterface::~ACPOMLCPPInterface() {}
+
+uint64_t ACPOMLCPPInterface::assignID() {
+ NextID++;
+ return NextID - 1;
+}
+
+bool ACPOMLCPPInterface::loadModel(std::string ModelSpecFile) {
+ std::string ModelName = readModelParam(ModelSpecFile, "ModelName");
+ // Check if the model is already in the dictionary
+ if (RunnerMap.find(ModelName) != RunnerMap.end()) {
+ LLVM_DEBUG(dbgs() << "loadModel: the compiled model '" << ModelName
+ << "' has already been loaded\n");
+ return true;
+ }
+ std::vector<std::pair<std::string, std::string>> Features{};
+ readFeatures(ModelSpecFile, Features);
+ std::vector<std::pair<std::string, std::string>> Outputs{};
+ readOutputs(ModelSpecFile, Outputs);
+
+ LLVM_DEBUG(llvm::dbgs() << "Loading compiled model with name " << ModelName
+ << "\n");
+
+ auto CreatorFunctionIterator = CreateModelRunnerMap.find(ModelName);
+ if (CreatorFunctionIterator == CreateModelRunnerMap.end()) {
+ LLVM_DEBUG(llvm::dbgs()
+ << ("Could not find compiled model class for model '" +
+ ModelName + "'\n"));
+ return false;
+ }
+
+ auto CreatorFunction = CreatorFunctionIterator->second;
+
+ std::string OutputKey = readModelParam(ModelSpecFile, "OutputKey");
+ auto ModelRunner = CreatorFunction(Features, OutputKey);
+
+ registerModel(ModelName, Features.size());
+ RunnerMap.insert(std::make_pair(ModelName, std::move(ModelRunner)));
+ auto ModelPtr = ModelMap.find(ModelName)->second;
+ for (size_t I = 0; I < Features.size(); I++) {
+ if (!registerFeature(ModelName, Features[I].first, I)) {
+ return false;
+ }
+ if (!ModelPtr->registerInput(Features[I].first, Features[I].second)) {
+ return false;
+ }
+ }
+
+ ModelPtr->setNumOutputs(Outputs.size());
+ for (size_t I = 0; I < Outputs.size(); I++) {
+ if (!registerOutput(ModelName, Outputs[I].first, Outputs[I].second)) {
+ return false;
+ }
+ }
+
+ LLVM_DEBUG(llvm::dbgs() << "Model " << ModelName
+ << " was successfully loaded\n");
+
+ // We do not need to set signature here because it is already given to make
+ // the precompiled model
+ return true;
+}
+
+bool ACPOMLCPPInterface::registerModel(std::string ModelName, int NumFeatures) {
+ auto Find = ModelMap.find(ModelName);
+ if (Find != ModelMap.end()) {
+ LLVM_DEBUG(dbgs() << "registerModel: Model " << ModelName
+ << " already exists\n");
+ return false;
+ }
+ std::shared_ptr<Model> NewModel = std::make_shared<Model>(NumFeatures);
+ ModelMap.insert(std::make_pair(ModelName, NewModel));
+ return true;
+}
+
+bool ACPOMLCPPInterface::registerModel(std::string ModelName, int NumFeatures,
+ int NumOutputs) {
+ auto Find = ModelMap.find(ModelName);
+ if (Find != ModelMap.end()) {
+ LLVM_DEBUG(dbgs() << "registerModel: Model " << ModelName
+ << " already exists\n");
+ return false;
+ }
+ std::shared_ptr<Model> NewModel =
+ std::make_shared<Model>(NumFeatures, NumOutputs);
+ ModelMap.insert(std::make_pair(ModelName, NewModel));
+ return true;
+}
+
+bool ACPOMLCPPInterface::registerFeature(std::string ModelName,
+ std::string FeatureName, int Index) {
+ auto Find = ModelMap.find(ModelName);
+ assert(Find != ModelMap.end());
+ if (Find == ModelMap.end()) {
+ LLVM_DEBUG(dbgs() << "ERROR in registerFeature: Model " << ModelName
+ << " has not been loaded\n");
+ return false;
+ }
+ uint64_t ID = assignID();
+ return Find->second->registerFeature(FeatureName, ID, Index);
+}
+
+bool ACPOMLCPPInterface::registerOutput(std::string ModelName,
+ std::string OutputName,
+ std::string OutputType) {
+ auto Find = ModelMap.find(ModelName);
+ if (Find == ModelMap.end()) {
+ LLVM_DEBUG(dbgs() << "ERROR in registerOutput: Model " << ModelName
+ << " has not been loaded\n");
+ return false;
+ }
+ return Find->second->registerOutput(OutputName, OutputType);
+}
+
+int ACPOMLCPPInterface::getNumLoadedModels() { return ModelMap.size(); }
+
+bool ACPOMLCPPInterface::defineInputIR(std::string Filename) { return false; }
+
+bool ACPOMLCPPInterface::setCustomFeature(std::string ModelName,
+ uint64_t FeatureID,
+ int FeatureValue) {
+ LLVM_DEBUG(
+ dbgs()
+ << "ACPOMLCPPInterface: setting custom feature of type int in model "
+ << ModelName << "\n");
+ auto Find = ModelMap.find(ModelName);
+ if (Find == ModelMap.end()) {
+ LLVM_DEBUG(dbgs() << "ERROR in setCustomFeature: Model " << ModelName
+ << " has not been loaded\n");
+ return false;
+ }
+ int Index = Find->second->getIndex(FeatureID);
+ std::shared_ptr<llvm::ACPOModelRunner> Runner =
+ RunnerMap.find(ModelName)->second;
+ return Runner->setCustomFeature(Index, FeatureValue);
+}
+
+bool ACPOMLCPPInterface::setCustomFeature(std::string ModelName,
+ uint64_t FeatureID,
+ int64_t FeatureValue) {
+ LLVM_DEBUG(
+ dbgs()
+ << "ACPOMLCPPInterface: setting custom feature of type double in model "
+ << ModelName << "\n");
+ auto Find = ModelMap.find(ModelName);
+ if (Find == ModelMap.end()) {
+ LLVM_DEBUG(dbgs() << "ERROR in setCustomFeature: Model " << ModelName
+ << " has not been loaded\n");
+ return false;
+ }
+ int Index = Find->second->getIndex(FeatureID);
+ std::shared_ptr<llvm::ACPOModelRunner> Runner =
+ RunnerMap.find(ModelName)->second;
+ return Runner->setCustomFeature(Index, FeatureValue);
+}
+
+bool ACPOMLCPPInterface::setCustomFeature(std::string ModelName,
+ uint64_t FeatureID,
+ double FeatureValue) {
+ LLVM_DEBUG(
+ dbgs()
+ << "ACPOMLCPPInterface: setting custom feature of type double in model "
+ << ModelName << "\n");
+ auto Find = ModelMap.find(ModelName);
+ if (Find == ModelMap.end()) {
+ LLVM_DEBUG(dbgs() << "ERROR in setCustomFeature: Model " << ModelName
+ << " has not been loaded\n");
+ return false;
+ }
+ int Index = Find->second->getIndex(FeatureID);
+ std::shared_ptr<llvm::ACPOModelRunner> Runner =
+ RunnerMap.find(ModelName)->second;
+ return Runner->setCustomFeature(Index, FeatureValue);
+}
+
+bool ACPOMLCPPInterface::setCustomFeature(std::string ModelName,
+ uint64_t FeatureID,
+ float FeatureValue) {
+ LLVM_DEBUG(
+ dbgs()
+ << "ACPOMLCPPInterface: setting custom feature of type float in model "
+ << ModelName << "\n");
+ auto Find = ModelMap.find(ModelName);
+ if (Find == ModelMap.end()) {
+ LLVM_DEBUG(dbgs() << "ERROR in setCustomFeature: Model " << ModelName
+ << " has not been loaded\n");
+ return false;
+ }
+ int Index = Find->second->getIndex(FeatureID);
+ std::shared_ptr<llvm::ACPOModelRunner> Runner =
+ RunnerMap.find(ModelName)->second;
+ return Runner->setCustomFeature(Index, FeatureValue);
+}
+
+bool ACPOMLCPPInterface::setCustomFeature(std::string ModelName,
+ uint64_t FeatureID,
+ bool FeatureValue) {
+ LLVM_DEBUG(
+ dbgs()
+ << "ACPOMLCPPInterface: setting custom feature of type bool in model "
+ << ModelName << "\n");
+ auto Find = ModelMap.find(ModelName);
+ if (Find == ModelMap.end()) {
+ LLVM_DEBUG(dbgs() << "ERROR in setCustomFeature: Model " << ModelName
+ << " has not been loaded\n");
+ return false;
+ }
+ int Index = Find->second->getIndex(FeatureID);
+ std::shared_ptr<llvm::ACPOModelRunner> Runner =
+ RunnerMap.find(ModelName)->second;
+ return Runner->setCustomFeature(Index, FeatureValue);
+}
+
+bool ACPOMLCPPInterface::setCustomFeature(std::string ModelName,
+ std::string FeatureName,
+ int FeatureValue) {
+ LLVM_DEBUG(
+ dbgs()
+ << "ACPOMLCPPInterface: setting custom feature of type int in model "
+ << ModelName << "\n");
+ auto Find = ModelMap.find(ModelName);
+ if (Find == ModelMap.end()) {
+ LLVM_DEBUG(dbgs() << "ERROR in setCustomFeature: Model " << ModelName
+ << " has not been loaded\n");
+ return false;
+ }
+ int Index = Find->second->getIndex(FeatureName);
+ std::shared_ptr<llvm::ACPOModelRunner> Runner =
+ RunnerMap.find(ModelName)->second;
+ return Runner->setCustomFeature(Index, FeatureValue);
+}
+
+bool ACPOMLCPPInterface::setCustomFeature(std::string ModelName,
+ std::string FeatureName,
+ int64_t FeatureValue) {
+ LLVM_DEBUG(
+ dbgs()
+ << "ACPOMLCPPInterface: setting custom feature of type int64 in model "
+ << ModelName << "\n");
+ auto Find = ModelMap.find(ModelName);
+ if (Find == ModelMap.end()) {
+ LLVM_DEBUG(dbgs() << "ERROR in setCustomFeature: Model " << ModelName
+ << " has not been loaded\n");
+ return false;
+ }
+ int Index = Find->second->getIndex(FeatureName);
+ std::shared_ptr<llvm::ACPOModelRunner> Runner =
+ RunnerMap.find(ModelName)->second;
+ return Runner->setCustomFeature(Index, FeatureValue);
+}
+
+bool ACPOMLCPPInterface::setCustomFeature(std::string ModelName,
+ std::string FeatureName,
+ double FeatureValue) {
+ LLVM_DEBUG(
+ dbgs()
+ << "ACPOMLCPPInterface: setting custom feature of type double in model "
+ << ModelName << "\n");
+ auto Find = ModelMap.find(ModelName);
+ if (Find == ModelMap.end()) {
+ LLVM_DEBUG(dbgs() << "ERROR in setCustomFeature: Model " << ModelName
+ << " has not been loaded\n");
+ return false;
+ }
+ int Index = Find->second->getIndex(FeatureName);
+ std::shared_ptr<llvm::ACPOModelRunner> Runner =
+ RunnerMap.find(ModelName)->second;
+ return Runner->setCustomFeature(Index, FeatureValue);
+}
+
+bool ACPOMLCPPInterface::setCustomFeature(std::string ModelName,
+ std::string FeatureName,
+ float FeatureValue) {
+ LLVM_DEBUG(
+ dbgs()
+ << "ACPOMLCPPInterface: setting custom feature of type float in model "
+ << ModelName << "\n");
+ auto Find = ModelMap.find(ModelName);
+ if (Find == ModelMap.end()) {
+ LLVM_DEBUG(dbgs() << "ERROR in setCustomFeature: Model " << ModelName
+ << " has not been loaded\n");
+ return false;
+ }
+ int Index = Find->second->getIndex(FeatureName);
+ std::shared_ptr<llvm::ACPOModelRunner> Runner =
+ RunnerMap.find(ModelName)->second;
+ return Runner->setCustomFeature(Index, FeatureValue);
+}
+
+bool ACPOMLCPPInterface::setCustomFeature(std::string ModelName,
+ std::string FeatureName,
+ bool FeatureValue) {
+ LLVM_DEBUG(
+ dbgs()
+ << "ACPOMLCPPInterface: setting custom feature of type bool in model "
+ << ModelName << "\n");
+ auto Find = ModelMap.find(ModelName);
+ if (Find == ModelMap.end()) {
+ LLVM_DEBUG(dbgs() << "ERROR in setCustomFeature: Model " << ModelName
+ << " has not been loaded\n");
+ return false;
+ }
+ int Index = Find->second->getIndex(FeatureName);
+ std::shared_ptr<llvm::ACPOModelRunner> Runner =
+ RunnerMap.find(ModelName)->second;
+ return Runner->setCustomFeature(Index, FeatureValue);
+}
+
+bool ACPOMLCPPInterface::initializeFeatures(
+ std::string ModelName,
+ const std::vector<std::pair<uint64_t, std::string>> &FeatureValues) {
+ LLVM_DEBUG(dbgs() << "Initializing features for model " << ModelName
+ << " using feature IDs\n");
+ auto Find = ModelMap.find(ModelName);
+ if (Find == ModelMap.end()) {
+ LLVM_DEBUG(dbgs() << "ERROR in initializeFeatures: Model " << ModelName
+ << " has not been loaded\n");
+ return false;
+ }
+ if (FeatureValues.size() > Find->second->getNumFeatures()) {
+ LLVM_DEBUG(dbgs() << "ERROR in initializeFeatures: Invalid features\n");
+ return false;
+ }
+ CurrentlyActiveModel = ModelName;
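+  // Each value arrives in string form; convert it according to the model's
+  // declared input type. The type names recognized by the dispatch below are
+  // "int64", "int32", "int", "float64" and "float32".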
+ for (const auto &Feature : FeatureValues) {
+ uint64_t FeatureID = Feature.first;
+ std::string FeatureValue = Feature.second;
+
+ std::string FeatureType =
+ getInputType(ModelName, Find->second->getName(FeatureID));
+ if (FeatureType == "int64") {
+      int64_t Value = std::stol(FeatureValue);
+ setCustomFeature(ModelName, FeatureID, Value);
+ } else if (FeatureType == "int32") {
+ int32_t Value = std::stoi(FeatureValue);
+ setCustomFeature(ModelName, FeatureID, Value);
+ } else if (FeatureType == "int") {
+ int Value = std::stoi(FeatureValue);
+ setCustomFeature(ModelName, FeatureID, Value);
+ } else if (FeatureType == "float64") {
+ double Value = std::stod(FeatureValue);
+ setCustomFeature(ModelName, FeatureID, Value);
+ } else if (FeatureType == "float32") {
+ float Value = std::stof(FeatureValue);
+ setCustomFeature(ModelName, FeatureID, Value);
+ } else {
+ LLVM_DEBUG(dbgs() << "ERROR in initializeFeatures: Invalid feature type "
+ << FeatureType << "\n");
+ return false;
+ }
+ }
+ return true;
+}
+
+bool ACPOMLCPPInterface::initializeFeatures(
+ std::string ModelName,
+ const std::vector<std::pair<std::string, std::string>> &FeatureValues) {
+ auto Find = ModelMap.find(ModelName);
+ LLVM_DEBUG(dbgs() << "Initializing features for model " << ModelName
+ << " using feature names\n");
+ if (Find == ModelMap.end()) {
+ LLVM_DEBUG(dbgs() << "ERROR in initializeFeatures: Model " << ModelName
+ << " has not been loaded\n");
+ return false;
+ }
+ if (FeatureValues.size() > Find->second->getNumFeatures()) {
+ LLVM_DEBUG(dbgs() << "ERROR in initializeFeatures: Invalid features\n");
+ return false;
+ }
+ CurrentlyActiveModel = ModelName;
+ for (const auto &Feature : FeatureValues) {
+ std::string FeatureName = Feature.first;
+ std::string FeatureValue = Feature.second;
+
+ std::string FeatureType = getInputType(ModelName, FeatureName);
+ if (FeatureType == "int64") {
+ int64_t Value = std::stol(FeatureValue);
+ setCustomFeature(ModelName, FeatureName, Value);
+ } else if (FeatureType == "int32") {
+ int32_t Value = std::stoi(FeatureValue);
+ setCustomFeature(ModelName, FeatureName, Value);
+ } else if (FeatureType == "int") {
+ int Value = std::stoi(FeatureValue);
+ setCustomFeature(ModelName, FeatureName, Value);
+ } else if (FeatureType == "float64") {
+ double Value = std::stod(FeatureValue);
+ setCustomFeature(ModelName, FeatureName, Value);
+ } else if (FeatureType == "float32") {
+ float Value = std::stof(FeatureValue);
+ setCustomFeature(ModelName, FeatureName, Value);
+ } else {
+ LLVM_DEBUG(dbgs() << "ERROR in initializeFeatures: Invalid feature type "
+ << FeatureType << "\n");
+ return false;
+ }
+ }
+ return true;
+}
+
+bool ACPOMLCPPInterface::setCustomFeatures(
+ std::string ModelName,
+ const std::vector<std::pair<uint64_t, std::string>> &FeatureValues) {
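+  // Note that, unlike initializeFeatures, this does not activate the model:
+  // ModelName must already be the currently active model.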
+ if (ModelName != CurrentlyActiveModel) {
+ LLVM_DEBUG(dbgs() << "ERROR in setCustomFeatures: Model " << ModelName
+ << " has not been loaded or is not active\n");
+ return false;
+ }
+ auto Find = ModelMap.find(ModelName);
+ if (FeatureValues.size() > Find->second->getNumFeatures()) {
+ LLVM_DEBUG(dbgs() << "ERROR in setCustomFeatures: Invalid features\n");
+ return false;
+ }
+ std::string Command = "SetCustomFeatures";
+ for (const auto &Feature : FeatureValues) {
+ uint64_t FeatureID = Feature.first;
+ std::string FeatureValue = Feature.second;
+
+ std::string FeatureType =
+ getInputType(ModelName, Find->second->getName(FeatureID));
+ if (FeatureType == "int64") {
+ int64_t Value = std::stol(FeatureValue);
+ setCustomFeature(ModelName, FeatureID, Value);
+ } else if (FeatureType == "int32") {
+ int32_t Value = std::stoi(FeatureValue);
+ setCustomFeature(ModelName, FeatureID, Value);
+ } else if (FeatureType == "int") {
+ int Value = std::stoi(FeatureValue);
+ setCustomFeature(ModelName, FeatureID, Value);
+ } else if (FeatureType == "float64") {
+ double Value = std::stod(FeatureValue);
+ setCustomFeature(ModelName, FeatureID, Value);
+ } else if (FeatureType == "float32") {
+ float Value = std::stof(FeatureValue);
+ setCustomFeature(ModelName, FeatureID, Value);
+ } else {
+ LLVM_DEBUG(dbgs() << "ERROR in setCustomFeatures: Invalid feature type "
+ << FeatureType << "\n");
+ return false;
+ }
+ }
+ return true;
+}
+
+bool ACPOMLCPPInterface::setCustomFeatures(
+ std::string ModelName,
+ const std::vector<std::pair<std::string, std::string>> &FeatureValues) {
+ if (ModelName != CurrentlyActiveModel) {
+ LLVM_DEBUG(dbgs() << "ERROR in setCustomFeatures: Model " << ModelName
+ << " has not been loaded or is not active\n");
+ return false;
+ }
+ auto Find = ModelMap.find(ModelName);
+ if (FeatureValues.size() > Find->second->getNumFeatures()) {
+ LLVM_DEBUG(dbgs() << "ERROR in setCustomFeatures: Invalid features\n");
+ return false;
+ }
+ std::string Command = "SetCustomFeatures";
+ for (const auto &Feature : FeatureValues) {
+ std::string FeatureName = Feature.first;
+ std::string FeatureValueStr = Feature.second;
+
+ std::string FeatureType = getInputType(ModelName, FeatureName);
+ if (FeatureType == "int64") {
+      int64_t FeatureValue = std::stol(FeatureValueStr);
+ setCustomFeature(ModelName, FeatureName, FeatureValue);
+ } else if (FeatureType == "int32") {
+ int32_t FeatureValue = std::stoi(FeatureValueStr);
+ setCustomFeature(ModelName, FeatureName, FeatureValue);
+ } else if (FeatureType == "int") {
+ int FeatureValue = std::stoi(FeatureValueStr);
+ setCustomFeature(ModelName, FeatureName, FeatureValue);
+ } else if (FeatureType == "float64") {
+ double FeatureValue = std::stod(FeatureValueStr);
+ setCustomFeature(ModelName, FeatureName, FeatureValue);
+ } else if (FeatureType == "float32") {
+ float FeatureValue = std::stof(FeatureValueStr);
+ setCustomFeature(ModelName, FeatureName, FeatureValue);
+ } else {
+ LLVM_DEBUG(dbgs() << "ERROR in setCustomFeatures: Invalid feature type "
+ << FeatureType << "\n");
+ return false;
+ }
+ }
+ return true;
+}
+
+bool ACPOMLCPPInterface::runModel(std::string ModelName) {
+ if (ModelName != CurrentlyActiveModel) {
+ LLVM_DEBUG(dbgs() << "ERROR in runModel: Model " << ModelName
+ << " is not active\n");
+ return false;
+ }
+ std::shared_ptr<llvm::ACPOModelRunner> Runner =
+ RunnerMap.find(CurrentlyActiveModel)->second;
+ return Runner->runModel();
+}
+
+std::string ACPOMLCPPInterface::getInputType(std::string ModelName,
+ std::string InputName) {
+ auto Find = ModelMap.find(ModelName);
+ assert(Find != ModelMap.end());
+ return Find->second->getInputType(InputName);
+}
+
+std::string ACPOMLCPPInterface::getOutputType(std::string ModelName,
+ std::string OutputName) {
+ auto Find = ModelMap.find(ModelName);
+ assert(Find != ModelMap.end());
+ return Find->second->getOutputType(OutputName);
+}
+
+int ACPOMLCPPInterface::getModelResultI(std::string OutputName) {
+ std::shared_ptr<llvm::ACPOModelRunner> Runner =
+ RunnerMap.find(CurrentlyActiveModel)->second;
+ return Runner->getModelResultI(OutputName);
+}
+
+int64_t ACPOMLCPPInterface::getModelResultI64(std::string OutputName) {
+ std::shared_ptr<llvm::ACPOModelRunner> Runner =
+ RunnerMap.find(CurrentlyActiveModel)->second;
+ return Runner->getModelResultI64(OutputName);
+}
+
+float ACPOMLCPPInterface::getModelResultF(std::string OutputName) {
+ std::shared_ptr<llvm::ACPOModelRunner> Runner =
+ RunnerMap.find(CurrentlyActiveModel)->second;
+ return Runner->getModelResultF(OutputName);
+}
+
+double ACPOMLCPPInterface::getModelResultD(std::string OutputName) {
+ std::shared_ptr<llvm::ACPOModelRunner> Runner =
+ RunnerMap.find(CurrentlyActiveModel)->second;
+ return Runner->getModelResultD(OutputName);
+}
+
+bool ACPOMLCPPInterface::getModelResultB(std::string OutputName) {
+ std::shared_ptr<llvm::ACPOModelRunner> Runner =
+ RunnerMap.find(CurrentlyActiveModel)->second;
+ return Runner->getModelResultB(OutputName);
+}
+
+int ACPOMLCPPInterface::getStatus() { return 1; }
+
+bool ACPOMLCPPInterface::releaseModel(std::string ModelName) {
+ ModelMap.erase(ModelName);
+ RunnerMap.erase(ModelName);
+ CurrentlyActiveModel = "";
+ return true;
+}
+
+bool ACPOMLCPPInterface::closeMLInterface() { return true; }
+
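+// Reads FilePath relative to the directory named by the ACPO_ENV_VAR_DIR
+// environment variable (falling back to $LLVM_DIR/acpo when it is unset) and
+// returns the remainder of the first line that starts with Param, skipping
+// Param itself and one separator character. Returns an empty string when the
+// directory or the parameter cannot be found.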
+std::string ACPOMLCPPInterface::readModelParam(std::string FilePath,
+ std::string Param) {
+ std::optional<std::string> Env = llvm::sys::Process::GetEnv(ACPO_ENV_VAR_DIR);
+ if (!Env || *Env == "") {
+ std::optional<std::string> LLVMDIROpt =
+ llvm::sys::Process::GetEnv("LLVM_DIR");
+ if (LLVMDIROpt) {
+ Env = *LLVMDIROpt + "/acpo/";
+ } else {
+ return "";
+ }
+ }
+
+ FilePath = *Env + "/" + FilePath;
+
+ std::ifstream FileStream{FilePath};
+
+ std::string Line;
+ while (std::getline(FileStream, Line)) {
+ if (Line.rfind(Param, 0) == 0) {
+ return Line.substr(Param.size() + 1);
+ }
+ }
+ return "";
+}
+
+void ACPOMLCPPInterface::readFeatures(
+ std::string FilePath,
+ std::vector<std::pair<std::string, std::string>> &Features) {
+ std::string Line = readModelParam(FilePath, "Features");
+ while (!Line.empty()) {
+ // This reads the features, assuming each feature is written as
+ // {feature_name, feature_type}
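+    // For illustration only (the exact on-disk syntax is inferred from this
+    // parser rather than from a specification), a parsed line might look like
+    //   {loop_depth, int64}{trip_count, float32}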
+ size_t LeftBracket = Line.find("{");
+ size_t Comma = Line.find(",", LeftBracket);
+ size_t Space = Line.find(" ", Comma);
+ size_t RightBracket = Line.find("}", Space);
+    if (LeftBracket == std::string::npos || Comma == std::string::npos ||
+        Space == std::string::npos || RightBracket == std::string::npos) {
+ break;
+ }
+ std::string Feature = Line.substr(LeftBracket + 1, Comma - LeftBracket - 1);
+ std::string Type = Line.substr(Space + 1, RightBracket - Space - 1);
+
+ Features.emplace_back(std::make_pair(Feature, Type));
+ int oldLength = Line.size();
+ Line = Line.substr(RightBracket + 1);
+ int newLength = Line.size();
+ if (oldLength == newLength)
+ break;
+ }
+}
+
+void ACPOMLCPPInterface::readOutputs(
+ std::string FilePath,
+ std::vector<std::pair<std::string, std::string>> &Outputs) {
+ std::string Line = readModelParam(FilePath, "Outputs");
+ while (!Line.empty()) {
+    // This reads the outputs, assuming each output is written as
+    // {output_name, output_type}
+ size_t LeftBracket = Line.find("{");
+ size_t Comma = Line.find(",", LeftBracket);
+ size_t Space = Line.find(" ", Comma);
+ size_t RightBracket = Line.find("}", Space);
+    if (LeftBracket == std::string::npos || Comma == std::string::npos ||
+        Space == std::string::npos || RightBracket == std::string::npos) {
+ break;
+ }
+ std::string Output = Line.substr(LeftBracket + 1, Comma - LeftBracket - 1);
+ std::string Type = Line.substr(Space + 1, RightBracket - Space - 1);
+
+ Outputs.emplace_back(std::make_pair(Output, Type));
+ int oldLength = Line.size();
+ Line = Line.substr(RightBracket + 1);
+ int newLength = Line.size();
+ if (oldLength == newLength)
+ break;
+ }
+}
+
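+// Typical usage sketch (the feature and output names below are placeholders,
+// and the model is assumed to have been loaded already):
+//   std::shared_ptr<ACPOMLInterface> MLIF = createPersistentCompiledMLIF();
+//   if (MLIF && MLIF->initializeFeatures("FI", FeatureValues) &&
+//       MLIF->runModel("FI")) {
+//     int Advice = MLIF->getModelResultI("FI-ShouldInline");
+//     ...
+//   }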
+std::shared_ptr<ACPOMLInterface> llvm::createPersistentCompiledMLIF() {
+ if (PersistentMLIF == nullptr) {
+ PersistentMLIF = std::make_shared<ACPOMLCPPInterface>();
+ if (!PersistentMLIF->isInitialized())
+ PersistentMLIF = nullptr;
+ }
+ return PersistentMLIF;
+}
+
+#ifdef LLVM_HAVE_TF_AOT_FICOMPILEDMODEL
+std::unique_ptr<ACPOModelRunner>
+createFI(std::vector<std::pair<std::string, std::string>> Inputs,
+ StringRef Decision) {
+ // Context does not ever seem to be used in the model runner,
+ // so for now just create an empty context object
+ LLVMContext Ctx;
+ return std::make_unique<FIModelRunner>(Ctx, Inputs, Decision);
+}
+#endif
+
+// Generate the map using ifdefs for now; in the future this could be
+// populated automatically using macros.
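+// A hypothetical additional compiled model would be registered the same way,
+// for example:
+//   #ifdef LLVM_HAVE_TF_AOT_LUCOMPILEDMODEL
+//   {"LU", createLU},
+//   #endif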
+const std::unordered_map<std::string,
+ ACPOMLCPPInterface::CreateModelRunnerFunction>
+ ACPOMLCPPInterface::CreateModelRunnerMap = {
+#ifdef LLVM_HAVE_TF_AOT_FICOMPILEDMODEL
+ {"FI", createFI},
+#endif
+};
diff --git a/llvm/lib/Analysis/ACPOModel.cpp b/llvm/lib/Analysis/ACPOModel.cpp
new file mode 100644
index 000000000000..2d0dae733943
--- /dev/null
+++ b/llvm/lib/Analysis/ACPOModel.cpp
@@ -0,0 +1,63 @@
+//===- ACPOModel.cpp - AI-Enabled Continuous Program Optimization ---------===//
+//
+// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+// See https://llvm.org/LICENSE.txt for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+//
+//===----------------------------------------------------------------------===//
+//
+// This file implements the interface between ACPO and ML-guided optimizations.
+// It delegates decision making to inference with a pre-trained model.
+//
+//===----------------------------------------------------------------------===//
+
+#include "llvm/Analysis/ACPOModel.h"
+#include "llvm/Analysis/LoopInfo.h"
+#include "llvm/Analysis/OptimizationRemarkEmitter.h"
+#include "llvm/Support/Debug.h"
+#include <memory>
+
+using namespace llvm;
+
+#define DEBUG_TYPE "acpo"
+
+ACPOAdvice::ACPOAdvice(std::unique_ptr<ACPOAdvice> &ResultFormat) {
+ assert(ResultFormat != nullptr);
+ for (auto &Entry : ResultFormat->getFieldMap()) {
+ reserveField(Entry.first, Entry.second.T);
+ }
+}
+
+void ACPOModel::prepareModelInput() {}
+
+bool ACPOModel::runModel(std::unique_ptr<ACPOAdvice> &Result) { return true; }
+
+void ACPOModel::addRequiredResultField(std::string name, Type::TypeID &ID) {
+ ResultFormat->reserveField(name, ID);
+}
+
+std::unique_ptr<ACPOAdvice> ACPOModel::getAdvice() {
+ if (ShouldUseML)
+ return getAdviceML();
+ else
+ return getAdviceNoML();
+}
+
+std::unique_ptr<ACPOAdvice> ACPOModel::getAdviceML() {
+ // This needs to be filled with a mechanism to invoke a model selected
+ // using the ModelRunner.
+ sendCustomFeatures();
+ prepareModelInput();
+ std::unique_ptr<ACPOAdvice> Result =
+ std::make_unique<ACPOAdvice>(ResultFormat);
+
+ if (runModel(Result))
+ return Result;
+ else
+ return nullptr;
+}
+
+void ACPOModel::addFeature(int64_t ID, Constant *Val) {
+ assert(CustomFeatureMap.find(ID) == CustomFeatureMap.end());
+ CustomFeatureMap[ID] = Val;
+}
diff --git a/llvm/lib/Analysis/CMakeLists.txt b/llvm/lib/Analysis/CMakeLists.txt
index 9c6a70f0221f..961b5037dd48 100644
--- a/llvm/lib/Analysis/CMakeLists.txt
+++ b/llvm/lib/Analysis/CMakeLists.txt
@@ -4,6 +4,30 @@ if (DEFINED LLVM_HAVE_TF_AOT OR LLVM_HAVE_TFLITE)
set(LLVM_INLINER_MODEL_CURRENT_URL "<UNSPECIFIED>" CACHE STRING "URL to download the LLVM inliner model")
+ if (ACPO_AOT)
+ foreach (model_name model_path model_signature IN ZIP_LISTS LLVM_ACPO_MODEL_NAMES LLVM_ACPO_MODEL_PATHS LLVM_ACPO_MODEL_SIGNATURES)
+ set(fname ${model_name}CompiledModel)
+ string(TOUPPER ${fname} fname_allcaps)
+ if (LLVM_ACPO_OVERRIDE)
+ string(TOUPPER ${LLVM_ACPO_OVERRIDE_ARCH} arch_allcaps)
+ set(LLVM_OVERRIDE_MODEL_HEADER_${fname_allcaps}
+ ${LLVM_ACPO_OVERRIDE_PATH}/${fname}-${arch_allcaps}.h)
+ set(LLVM_OVERRIDE_MODEL_OBJECT_${fname_allcaps}
+ ${LLVM_ACPO_OVERRIDE_PATH}/${fname}-${arch_allcaps}.o)
+ endif()
+
+ tf_find_and_compile(
+ ${model_path}
+ ${LLVM_INLINER_MODEL_CURRENT_URL}
+ ${LLVM_INLINER_MODEL_PATH_DEFAULT}
+ ""
+ serve
+ "${model_signature}"
+ "${fname}"
+ "llvm::${fname}"
+ )
+ endforeach()
+ endif()
if (DEFINED LLVM_HAVE_TF_AOT)
tf_find_and_compile(
${LLVM_INLINER_MODEL_PATH}
@@ -24,6 +48,10 @@ if (DEFINED LLVM_HAVE_TF_AOT OR LLVM_HAVE_TFLITE)
endif()
add_llvm_component_library(LLVMAnalysis
+ ACPOCollectFeatures.cpp
+ ACPOFIModel.cpp
+ ACPOMLInterface.cpp
+ ACPOModel.cpp
AliasAnalysis.cpp
AliasAnalysisEvaluator.cpp
AliasSetTracker.cpp
@@ -41,6 +69,7 @@ add_llvm_component_library(LLVMAnalysis
CGSCCPassManager.cpp
CallGraph.cpp
CallGraphSCCPass.cpp
+ CallHeight.cpp
CallPrinter.cpp
CaptureTracking.cpp
CmpInstAnalysis.cpp
@@ -59,6 +88,8 @@ add_llvm_component_library(LLVMAnalysis
DomPrinter.cpp
DomTreeUpdater.cpp
DominanceFrontier.cpp
+ DumpCallsite.cpp
+ DumpFeature.cpp
FunctionPropertiesAnalysis.cpp
GlobalsModRef.cpp
GuardUtils.cpp
@@ -100,6 +131,7 @@ add_llvm_component_library(LLVMAnalysis
MemoryProfileInfo.cpp
MemorySSA.cpp
MemorySSAUpdater.cpp
+ ModelDataCollector.cpp
ModelUnderTrainingRunner.cpp
ModuleDebugInfoPrinter.cpp
ModuleSummaryAnalysis.cpp
diff --git a/llvm/lib/Analysis/CallHeight.cpp b/llvm/lib/Analysis/CallHeight.cpp
new file mode 100644
index 000000000000..f7b88cbdff05
--- /dev/null
+++ b/llvm/lib/Analysis/CallHeight.cpp
@@ -0,0 +1,89 @@
+//===- CallHeight.cpp - CallHeight implementation ------------------------===//
+//
+// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+// See https://llvm.org/LICENSE.txt for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+//
+//===----------------------------------------------------------------------===//
+//
+// This file implements getting the call height of functions in a module.
+//
+//===----------------------------------------------------------------------===//
+#include "llvm/Analysis/CallHeight.h"
+#include "llvm/ADT/SCCIterator.h"
+#include "llvm/Analysis/CallGraph.h"
+#include "llvm/IR/InstIterator.h"
+#include "llvm/InitializePasses.h"
+
+using namespace llvm;
+
+#define DEBUG_TYPE "call-height"
+
+// Adapted from MLInlineAdvisor.cpp
+CallBase *getInlinableCallSite(Instruction &I) {
+ if (auto *CS = dyn_cast<CallBase>(&I)) {
+ if (Function *Callee = CS->getCalledFunction())
+ if (!Callee->isDeclaration()) {
+ return CS;
+ }
+ }
+ return nullptr;
+}
+
+unsigned CallHeight::getLevel(Function &F) { return (*Levels)[&F]; }
+
+CallHeight::CallHeight(Module &M)
+ : Levels(std::make_unique<std::map<const Function *, unsigned>>()) {
+ // Adapted from MLInlineAdvisor.cpp
+ CallGraph CG = CallGraph(M);
+
+ for (auto I = scc_begin(&CG); !I.isAtEnd(); ++I) {
+ const std::vector<CallGraphNode *> &CGNodes = *I;
+ unsigned Level = 0;
+ for (auto *CGNode : CGNodes) {
+ Function *F = CGNode->getFunction();
+ if (!F || F->isDeclaration())
+ continue;
+ for (auto &I : instructions(F)) {
+ if (auto *CS = getInlinableCallSite(I)) {
+ auto *Called = CS->getCalledFunction();
+ auto Pos = Levels->find(Called);
+          // In a bottom-up traversal, an inlinable callee is either in the
+          // same SCC or in an already-visited SCC. So not finding its
+          // level means we haven't visited it yet, meaning it's in this SCC.
+ if (Pos == Levels->end())
+ continue;
+ Level = std::max(Level, Pos->second + 1);
+ }
+ }
+ }
+ for (auto *CGNode : CGNodes) {
+ Function *F = CGNode->getFunction();
+ if (F && !F->isDeclaration())
+ (*Levels)[F] = Level;
+ }
+ }
+}
+
+AnalysisKey CallHeightAnalysis::Key;
+
+CallHeight CallHeightAnalysis::run(Module &M, ModuleAnalysisManager &MAM) {
+ return CallHeight(M);
+}
+
+bool CallHeightAnalysisWrapper::runOnModule(Module &M) {
+ Result.reset(new CallHeight(M));
+ return false;
+}
+
+void CallHeightAnalysisWrapper::getAnalysisUsage(AnalysisUsage &AU) const {
+ AU.setPreservesAll();
+}
+
+char CallHeightAnalysisWrapper::ID = 0;
+INITIALIZE_PASS(CallHeightAnalysisWrapper, DEBUG_TYPE, "Call Height Analysis",
+ false, true)
+
+Pass *llvm::createCallHeightAnalysisWrapper() {
+ return new CallHeightAnalysisWrapper();
+}
diff --git a/llvm/lib/Analysis/DumpCallsite.cpp b/llvm/lib/Analysis/DumpCallsite.cpp
new file mode 100644
index 000000000000..d49885a372f2
--- /dev/null
+++ b/llvm/lib/Analysis/DumpCallsite.cpp
@@ -0,0 +1,82 @@
+//===- DumpCallsite.cpp - DumpCallsite implementation --------------------===//
+//
+// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+// See https://llvm.org/LICENSE.txt for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+//
+//===----------------------------------------------------------------------===//
+//
+// This file implements the ability to dump all callsites in a given function.
+//
+//===----------------------------------------------------------------------===//
+#include "llvm/Analysis/DumpCallsite.h"
+#include "llvm/IR/Function.h"
+#include "llvm/IR/InstIterator.h"
+#include "llvm/IR/Instructions.h"
+#include "llvm/IR/LegacyPassManager.h"
+#include "llvm/InitializePasses.h"
+#include "llvm/Pass.h"
+#include "llvm/Support/CommandLine.h"
+
+using namespace llvm;
+
+static cl::opt<bool>
+ IncludeDeclaration("include-declaration", cl::Hidden,
+ cl::desc("Also dump declaration in dump-callsite pass"));
+
+namespace {
+
+// Implementation of actual DumpCallsite
+class DumpCallsite {
+public:
+ void run(Function &F);
+};
+
+// Wrapper for legacy PM
+class DumpCallsiteLegacy : public FunctionPass {
+public:
+ static char ID;
+ DumpCallsiteLegacy() : FunctionPass(ID) {}
+
+ bool runOnFunction(Function &F) override;
+};
+
+void DumpCallsite::run(Function &F) {
+ outs() << F.getName();
+ // Get all callees from 'call' inst
+ for (auto &I : instructions(F)) {
+ // Is a call inst
+ if (auto *CS = dyn_cast<CallBase>(&I)) {
+ // callee is present
+ if (Function *Callee = CS->getCalledFunction()) {
+ // Not intrinsic
+ if (!Callee->isIntrinsic()) {
+          // decide whether to also dump declarations
+ if (!Callee->isDeclaration() || IncludeDeclaration) {
+ outs() << " " << Callee->getName();
+ }
+ }
+ }
+ }
+ }
+ outs() << "\n";
+}
+
+bool DumpCallsiteLegacy::runOnFunction(Function &F) {
+ DumpCallsite Impl;
+ Impl.run(F);
+ return false;
+}
+
+} // namespace
+
+char DumpCallsiteLegacy::ID = 0;
+INITIALIZE_PASS(DumpCallsiteLegacy, "dump-callsite", "Dump Callsite", false,
+ false)
+
+PreservedAnalyses DumpCallsitePass::run(Function &F,
+ FunctionAnalysisManager &FAM) {
+ DumpCallsite Impl;
+ Impl.run(F);
+ return PreservedAnalyses::all();
+}
diff --git a/llvm/lib/Analysis/DumpFeature.cpp b/llvm/lib/Analysis/DumpFeature.cpp
new file mode 100644
index 000000000000..81756226c2fd
--- /dev/null
+++ b/llvm/lib/Analysis/DumpFeature.cpp
@@ -0,0 +1,575 @@
+//===- DumpFeature.cpp - DumpFeature implementation -----------------------===//
+//
+// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+// See https://llvm.org/LICENSE.txt for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+//
+//===----------------------------------------------------------------------===//
+//
+// This file implements dumping features for functions in an SCC.
+//
+//===----------------------------------------------------------------------===//
+#include "llvm/Analysis/DumpFeature.h"
+#include "llvm/ADT/SCCIterator.h"
+#include "llvm/Analysis/CallHeight.h"
+#include "llvm/Analysis/TargetLibraryInfo.h"
+#include "llvm/IR/BasicBlock.h"
+#include "llvm/IR/Function.h"
+#include "llvm/IR/Instructions.h"
+#include "llvm/IR/PassManager.h"
+#include "llvm/InitializePasses.h"
+#include "llvm/MC/MCAsmLayout.h"
+#include "llvm/Support/Casting.h"
+#include "llvm/Support/CommandLine.h"
+#include "llvm/Support/FileSystem.h"
+#include "llvm/Support/raw_ostream.h"
+
+#include <algorithm>
+#include <deque>
+#include <vector>
+
+using namespace llvm;
+
+bool EnableFeatureDump;
+static cl::opt<bool, true> EnableFeatureDumpFlag(
+ "enable-feature-dump", cl::location(EnableFeatureDump), cl::init(false),
+ cl::Hidden, cl::ZeroOrMore, cl::desc("Enable Feature Dump"));
+
+static cl::opt<bool>
+ CheckPairHisto("check-pair-histo", cl::Hidden,
+ cl::desc("Dump instruction pairs in the histogram"));
+
+static cl::opt<bool> Verbose("dump-verbose", cl::Hidden,
+ cl::desc("Dump as human readable format"));
+
+static llvm::cl::opt<std::string>
+ OutFile("feature-output", llvm::cl::desc("File for outputting features"),
+ llvm::cl::init("features.csv"));
+
+namespace {
+unsigned getMaxInstructionID() {
+#define LAST_OTHER_INST(NR) return NR;
+#include "llvm/IR/Instruction.def"
+}
+
+// This is a point-in-time selection: we determined that including these
+// pairs of consecutive instructions (in the IR layout available at inline
+// time) as features improves the model performance. We want to move away
+// from manual feature selection.
+// The array is given in opcode pairs rather than labels because 1) labels
+// weren't readily available, and 2) the successions were hand-extracted.
+//
+// This array must be sorted.
+static const std::array<std::pair<size_t, size_t>, 137>
+ ImportantInstructionSuccessions{
+ {{1, 1}, {1, 4}, {1, 5}, {1, 7}, {1, 8}, {1, 9}, {1, 11},
+ {1, 12}, {1, 13}, {1, 14}, {1, 18}, {1, 20}, {1, 22}, {1, 24},
+ {1, 25}, {1, 26}, {1, 27}, {1, 28}, {1, 29}, {1, 30}, {1, 31},
+ {1, 32}, {1, 33}, {1, 34}, {1, 39}, {1, 40}, {1, 42}, {1, 45},
+ {2, 1}, {2, 2}, {2, 13}, {2, 28}, {2, 29}, {2, 32}, {2, 33},
+ {2, 34}, {2, 38}, {2, 48}, {2, 49}, {2, 53}, {2, 55}, {2, 56},
+ {13, 2}, {13, 13}, {13, 26}, {13, 33}, {13, 34}, {13, 56}, {15, 27},
+ {28, 2}, {28, 48}, {28, 53}, {29, 2}, {29, 33}, {29, 56}, {31, 31},
+ {31, 33}, {31, 34}, {31, 49}, {32, 1}, {32, 2}, {32, 13}, {32, 15},
+ {32, 28}, {32, 29}, {32, 32}, {32, 33}, {32, 34}, {32, 39}, {32, 40},
+ {32, 48}, {32, 49}, {32, 53}, {32, 56}, {33, 1}, {33, 2}, {33, 32},
+ {33, 33}, {33, 34}, {33, 49}, {33, 53}, {33, 56}, {34, 1}, {34, 2},
+ {34, 32}, {34, 33}, {34, 34}, {34, 49}, {34, 53}, {34, 56}, {38, 34},
+ {39, 57}, {40, 34}, {47, 15}, {47, 49}, {48, 2}, {48, 34}, {48, 56},
+ {49, 1}, {49, 2}, {49, 28}, {49, 32}, {49, 33}, {49, 34}, {49, 39},
+ {49, 49}, {49, 56}, {53, 1}, {53, 2}, {53, 28}, {53, 34}, {53, 53},
+ {53, 57}, {55, 1}, {55, 28}, {55, 34}, {55, 53}, {55, 55}, {55, 56},
+ {56, 1}, {56, 2}, {56, 7}, {56, 13}, {56, 32}, {56, 33}, {56, 34},
+ {56, 49}, {56, 53}, {56, 56}, {56, 64}, {57, 34}, {57, 56}, {57, 57},
+ {64, 1}, {64, 64}, {65, 1}, {65, 65}}};
+
+size_t getSize(Function &F, TargetTransformInfo &TTI) {
+ size_t SumOfAllInstCost = 0;
+ for (const auto &BB : F)
+ for (const auto &I : BB) {
+ std::optional<long int> cost =
+ TTI.getInstructionCost(
+ &I, TargetTransformInfo::TargetCostKind::TCK_CodeSize)
+ .getValue();
+ if (cost.has_value())
+ SumOfAllInstCost += cost.value();
+ }
+ return SumOfAllInstCost;
+}
+
+unsigned getMaxDominatorTreeDepth(const Function &F,
+ const DominatorTree &Tree) {
+ unsigned MaxBBDepth = 0;
+ for (const auto &BB : F)
+ if (const auto *TN = Tree.getNode(&BB))
+ MaxBBDepth = std::max(MaxBBDepth, TN->getLevel());
+
+ return MaxBBDepth;
+}
+
+// Get the number of valid call uses and the number of those inside loops.
+std::pair<int, int>
+getValidCallUsesAndInLoopCounts(Function &F,
+ FunctionAnalysisManager *FAM = nullptr) {
+ unsigned CallUses = 0;
+ unsigned CallUsesInLoop = 0;
+
+ for (User *U : F.users()) {
+ if (CallBase *CB = dyn_cast<CallBase>(U)) {
+ ++CallUses;
+ BasicBlock *BB = CB->getParent();
+ Function *FUser = CB->getCaller();
+ auto &LI =
+ FAM->getResult<LoopAnalysis>(*FUser);
+ if (LI.getLoopFor(BB) != nullptr) {
+ ++CallUsesInLoop;
+ }
+ }
+ }
+ return std::make_pair(CallUses, CallUsesInLoop);
+}
+} // namespace
+
+// We have: 9 calculated features (the features here); 1 feature for each
+// instruction opcode; and 1 feature for each manually-identified sequence.
+// For the latter 2, we build a histogram: we count the number of
+// occurrences of each instruction opcode or succession of instructions,
+// respectively.
+// Note that instruction opcodes start from 1. For convenience, we also have
+// an always 0 feature for the '0' opcode, hence the extra 1.
+const size_t ACPOFIExtendedFeatures::FunctionFeatures::FeatureCount =
+ ImportantInstructionSuccessions.size() + getMaxInstructionID() + 1 +
+ static_cast<size_t>(
+ ACPOFIExtendedFeatures::NamedFloatFeatureIndex::NumNamedFloatFeatures) +
+ static_cast<size_t>(
+ ACPOFIExtendedFeatures::NamedFeatureIndex::NumNamedFeatures);
+
+void ACPOFIExtendedFeatures::updateLoopRelatedFeatures(Function &F,
+ LoopInfo &LI,
+ FunctionFeatures &FF) {
+ uint64_t LoopNum = std::distance(LI.begin(), LI.end());
+
+ uint64_t LoopInstrCount = 0;
+ uint64_t BlockWithMulSuccNum = 0;
+ uint64_t LoopLevelSum = 0;
+ for (auto &L : LI) {
+ LoopLevelSum += static_cast<uint64_t>(L->getLoopDepth());
+ FF[NamedFeatureIndex::MaxLoopDepth] =
+ std::max(FF[NamedFeatureIndex::MaxLoopDepth],
+ static_cast<uint64_t>(L->getLoopDepth()));
+ for (const BasicBlock *BB : L->getBlocks()) {
+ unsigned SuccCount = std::distance(succ_begin(BB), succ_end(BB));
+ if (SuccCount > 1)
+ BlockWithMulSuccNum++;
+ LoopInstrCount += std::distance(BB->instructionsWithoutDebug().begin(),
+ BB->instructionsWithoutDebug().end());
+ }
+ }
+
+ FF[NamedFeatureIndex::Loops] = LoopNum;
+ if (LoopNum != 0) {
+ uint64_t q = LoopInstrCount / LoopNum;
+ FF[NamedFloatFeatureIndex::InstrPerLoop] =
+ q + ((float)(LoopInstrCount - q * LoopNum)) / LoopNum;
+ q = BlockWithMulSuccNum / LoopNum;
+ FF[NamedFloatFeatureIndex::BlockWithMultipleSuccecorsPerLoop] =
+ q + ((float)(BlockWithMulSuccNum - q * LoopNum)) / LoopNum;
+ q = LoopLevelSum / LoopNum;
+ FF[NamedFloatFeatureIndex::AvgNestedLoopLevel] =
+ q + ((float)(LoopLevelSum - q * LoopNum)) / LoopNum;
+ }
+}
+
+void ACPOFIExtendedFeatures::updateBBLoopCallsiteBFFeatures(
+ Function &F, FunctionFeatures &FF, LoopInfo &LI,
+ FunctionAnalysisManager *FAM) {
+ // Initializations before looping
+ unsigned NumCallsiteInLoop = 0;
+ unsigned NumCallsite = 0;
+ uint64_t MaxCallsiteBlockFreq = 0;
+ uint64_t InstrNum = 0;
+ uint64_t SuccNum = 0;
+ uint64_t VecNum = 0;
+ uint64_t BlockNum = F.size();
+ auto getPairIndex = [](size_t a, size_t b) {
+ auto I = llvm::find(ImportantInstructionSuccessions, std::make_pair(a, b));
+ if (I == ImportantInstructionSuccessions.end())
+ return -1;
+ return static_cast<int>(
+ std::distance(ImportantInstructionSuccessions.begin(), I));
+ };
+ int StartID = 0;
+ int LastID = StartID;
+
+ // We don't want debug calls, because they'd just add noise.
+ // Sum number of instructions and successors on the way
+ for (auto &BB : F) {
+ SuccNum += std::distance(succ_begin(&BB), succ_end(&BB));
+ for (auto &I : BB.instructionsWithoutDebug()) {
+ if (CallBase *CB = dyn_cast<CallBase>(&I)) {
+ Function *Callee = CB->getCalledFunction();
+ if (Callee && !Callee->isIntrinsic()) {
+ ++NumCallsite;
+ if (!Callee->isDeclaration()) {
+            // Check all the functions that were called and get the max block
+            // frequency.
+ uint64_t EntryFreq =
+ FAM->getResult<BlockFrequencyAnalysis>(*Callee)
+ .getEntryFreq();
+ MaxCallsiteBlockFreq = std::max(EntryFreq, MaxCallsiteBlockFreq);
+ }
+
+ if (Callee != nullptr) {
+ // Collect the number of callsites that were invoked with a pointer
+ // argument.
+ for (auto arg = Callee->arg_begin(); arg != Callee->arg_end();
+ arg++)
+ if (isa<PointerType>(arg->getType())) {
+ FF[NamedFeatureIndex::PtrCallee]++;
+ break;
+ }
+ }
+
+ // Collect the number of callsites that returns a pointer type.
+ if (isa<PointerType>(CB->getType())) {
+ FF[NamedFeatureIndex::CallReturnPtr]++;
+ }
+
+ // Check if the given function is recursive.
+ if (&F == Callee) {
+ FF[NamedFeatureIndex::IsRecursive] = 1;
+ }
+
+ BasicBlock *BB = CB->getParent();
+          // If the block containing this call is inside a loop, count it.
+ if (LI.getLoopFor(BB) != nullptr) {
+ ++NumCallsiteInLoop;
+ }
+ }
+ }
+
+ auto ID = I.getOpcode();
+ ++FF.InstructionHistogram[ID];
+ int PairIndex = getPairIndex(LastID, ID);
+ if (PairIndex >= 0)
+ ++FF.InstructionPairHistogram[PairIndex];
+ LastID = ID;
+ InstrNum++;
+ unsigned NumOp = I.getNumOperands();
+
+      // If the instruction contains a vector operand, consider it a vector
+      // instruction.
+ for (unsigned i = 0; i < NumOp; i++) {
+ if (isa<VectorType>(I.getOperand(i)->getType())) {
+ VecNum++;
+ break;
+ }
+ }
+
+ // If this is a conditional branch, check if it uses an argument
+ if (const auto II = dyn_cast<BranchInst>(&I))
+ if (II->isConditional()) {
+ FF[NamedFeatureIndex::ConditionalBranch]++;
+ // find the instruction where the condition is defined.
+ if (auto def = dyn_cast<Instruction>(II->getCondition())) {
+            // If any operand of def is an Argument, then increment
+            // CBwithArg.
+ bool found = false;
+ for (unsigned i = 0; i < def->getNumOperands(); i++) {
+ if (isa<Argument>(def->getOperand(i))) {
+ FF[NamedFeatureIndex::CBwithArg]++;
+ found = true;
+ break;
+ }
+ }
+ if (found)
+ break;
+ }
+ }
+ }
+ }
+
+ FF[NamedFloatFeatureIndex::AvgVecInstr] = (float)VecNum / InstrNum;
+ FF[NamedFeatureIndex::Blocks] = BlockNum;
+ if (BlockNum > 0) {
+ uint64_t q = InstrNum / BlockNum;
+ FF[NamedFloatFeatureIndex::InstructionPerBlock] =
+ q + ((float)(InstrNum - q * BlockNum)) / BlockNum;
+ q = SuccNum / BlockNum;
+ FF[NamedFloatFeatureIndex::SuccessorPerBlock] =
+ q + ((float)(SuccNum - q * BlockNum)) / BlockNum;
+ }
+
+ FF[NamedFeatureIndex::MaxCallsiteBlockFreq] = MaxCallsiteBlockFreq;
+ FF[NamedFeatureIndex::NumCallsiteInLoop] = NumCallsiteInLoop;
+ FF[NamedFeatureIndex::Calls] = NumCallsite;
+}
+
+ACPOFIExtendedFeatures::FunctionFeatures
+ACPOFIExtendedFeatures::getFunctionFeatures(
+ Function &F, DominatorTree &DomTree, TargetTransformInfo &TTI, LoopInfo &LI,
+ FunctionAnalysisManager *FAM,
+ bool ValidSize, bool ValidLoop, bool ValidTree) {
+ assert(llvm::is_sorted(ImportantInstructionSuccessions) &&
+ "expected function features are sorted");
+
+ FunctionFeatures FF;
+ size_t InstrCount = getMaxInstructionID() + 1;
+ FF.InstructionHistogram.resize(InstrCount);
+ FF.InstructionPairHistogram.resize(ImportantInstructionSuccessions.size());
+
+  // Check all the arguments to see if any of them has a pointer type.
+ for (auto arg = F.arg_begin(); arg != F.arg_end(); arg++) {
+ if (isa<PointerType>(arg->getType())) {
+ FF[NamedFeatureIndex::PtrArgs]++;
+ }
+ }
+
+ std::pair<int, int> ValidCallAndInLoopCounts =
+ getValidCallUsesAndInLoopCounts(F, FAM);
+ if (!ValidSize)
+ FF[NamedFeatureIndex::InitialSize] = getSize(F, TTI);
+ FF[NamedFeatureIndex::IsLocal] = F.hasLocalLinkage();
+ FF[NamedFeatureIndex::IsLinkOnceODR] = F.hasLinkOnceODRLinkage();
+ FF[NamedFeatureIndex::IsLinkOnce] = F.hasLinkOnceLinkage();
+ if (!ValidTree)
+ FF[NamedFeatureIndex::MaxDomTreeLevel] =
+ getMaxDominatorTreeDepth(F, DomTree);
+ FF[NamedFeatureIndex::CallUsage] = ValidCallAndInLoopCounts.first;
+ FF[NamedFeatureIndex::NumOfCallUsesInLoop] = ValidCallAndInLoopCounts.second;
+ FF[NamedFeatureIndex::EntryBlockFreq] =
+ FAM->getResult<BlockFrequencyAnalysis>(F)
+ .getEntryFreq();
+ ACPOFIExtendedFeatures::updateBBLoopCallsiteBFFeatures(F, FF, LI, FAM);
+ if (!ValidLoop)
+ ACPOFIExtendedFeatures::updateLoopRelatedFeatures(F, LI, FF);
+ return FF;
+}
+
+static int getCallHeight(Module &M, CallHeight *CH, Function *F) {
+ if (CH == nullptr) {
+    // If we don't have a cached result (for example, when running under opt
+    // or with the legacy pass manager), re-calculate the function level
+    // here.
+ CallHeight CH = CallHeight(M);
+ return CH.getLevel(*F);
+ }
+ return CH->getLevel(*F);
+}
+
+void dumpInstructionPairs(raw_fd_ostream &OS) {
+ for (size_t i = 0; i < ImportantInstructionSuccessions.size(); i++) {
+ std::pair<uint64_t, uint64_t> pair = ImportantInstructionSuccessions[i];
+ OS << "{" << Instruction::getOpcodeName(pair.first) << ", "
+ << Instruction::getOpcodeName(pair.second) << "} ";
+ }
+ OS << "\n";
+}
+
+void dumpFunctionFeatures(raw_fd_ostream &OS,
+ ACPOFIExtendedFeatures::FunctionFeatures &FF,
+ Function &F, bool Verbose) {
+ if (Verbose) {
+ OS << "Function Name: " << F.getName() << "\n";
+ OS << "FeatureCount: " << FF.FeatureCount << "\n";
+ OS << "\nAverage instructions per basic block: "
+ << FF[ACPOFIExtendedFeatures::NamedFloatFeatureIndex::
+ InstructionPerBlock]
+ << "\nAverage number of successors per block: "
+ << FF[ACPOFIExtendedFeatures::NamedFloatFeatureIndex::SuccessorPerBlock]
+ << "\nAverage number of vector instructions per instruction: "
+ << FF[ACPOFIExtendedFeatures::NamedFloatFeatureIndex::AvgVecInstr]
+ << "\nAverage nest level per loop: "
+ << FF[ACPOFIExtendedFeatures::NamedFloatFeatureIndex::AvgNestedLoopLevel]
+ << "\nAverage instructions per loop: "
+ << FF[ACPOFIExtendedFeatures::NamedFloatFeatureIndex::InstrPerLoop]
+       << "\nAverage blocks with multiple successors per loop: "
+ << FF[ACPOFIExtendedFeatures::NamedFloatFeatureIndex::
+ BlockWithMultipleSuccecorsPerLoop]
+ << "\nInitial Size: "
+ << FF[ACPOFIExtendedFeatures::NamedFeatureIndex::InitialSize] << "\n"
+ << "Blocks: " << FF[ACPOFIExtendedFeatures::NamedFeatureIndex::Blocks]
+ << "\n"
+ << "Calls (Number of callsites): "
+ << FF[ACPOFIExtendedFeatures::NamedFeatureIndex::Calls] << "\n"
+ << "IsLocal: " << FF[ACPOFIExtendedFeatures::NamedFeatureIndex::IsLocal]
+ << "\n"
+ << "IsLinkOnceODR: "
+ << FF[ACPOFIExtendedFeatures::NamedFeatureIndex::IsLinkOnceODR] << "\n"
+ << "IsLinkOnce: "
+ << FF[ACPOFIExtendedFeatures::NamedFeatureIndex::IsLinkOnce] << "\n"
+ << "Loops: " << FF[ACPOFIExtendedFeatures::NamedFeatureIndex::Loops]
+ << "\n"
+ << "MaxLoopDepth: "
+ << FF[ACPOFIExtendedFeatures::NamedFeatureIndex::MaxLoopDepth] << "\n"
+ << "MaxDomTreeLevel: "
+ << FF[ACPOFIExtendedFeatures::NamedFeatureIndex::MaxDomTreeLevel]
+ << "\nPointer arguments of this caller: "
+ << FF[ACPOFIExtendedFeatures::NamedFeatureIndex::PtrArgs]
+ << "\nCallees with pointer arguments: "
+ << FF[ACPOFIExtendedFeatures::NamedFeatureIndex::PtrCallee]
+ << "\nCallees that return a pointer: "
+ << FF[ACPOFIExtendedFeatures::NamedFeatureIndex::CallReturnPtr]
+ << "\nConditional Branches: "
+ << FF[ACPOFIExtendedFeatures::NamedFeatureIndex::ConditionalBranch]
+       << "\nConditional Branches that depend on an argument: "
+ << FF[ACPOFIExtendedFeatures::NamedFeatureIndex::CBwithArg]
+ << "\nCaller Height of the current function: "
+ << FF[ACPOFIExtendedFeatures::NamedFeatureIndex::CallerHeight]
+       << "\nNumber of explicit calls to this function: "
+ << FF[ACPOFIExtendedFeatures::NamedFeatureIndex::CallUsage]
+ << "\nIs recursive: "
+ << FF[ACPOFIExtendedFeatures::NamedFeatureIndex::IsRecursive]
+       << "\nNumber of callsites that are inside a loop in this function: "
+ << FF[ACPOFIExtendedFeatures::NamedFeatureIndex::NumCallsiteInLoop]
+       << "\nNumber of explicit calls to this function that are in a loop: "
+ << FF[ACPOFIExtendedFeatures::NamedFeatureIndex::NumOfCallUsesInLoop]
+ << "\nBlock Frequency for the first block of this function: "
+ << FF[ACPOFIExtendedFeatures::NamedFeatureIndex::EntryBlockFreq]
+ << "\nMaximum of all callsites' entry Block Frequency: "
+ << FF[ACPOFIExtendedFeatures::NamedFeatureIndex::MaxCallsiteBlockFreq]
+ << "\n";
+ OS << "InstructionHistogram: ";
+ OS << "Size: " << FF.InstructionHistogram.size() << "\n";
+ for (size_t i = 0; i < FF.InstructionHistogram.size(); i++) {
+ OS << FF.InstructionHistogram[i] << " ";
+ }
+ OS << "\n";
+ OS << "InstructionPairHistogram: ";
+ OS << "Size: " << FF.InstructionPairHistogram.size() << "\n";
+ for (size_t i = 0; i < FF.InstructionPairHistogram.size(); i++) {
+ OS << FF.InstructionPairHistogram[i] << " ";
+ }
+ OS << "\n\n";
+ } else {
+ OS << F.getName() << " "
+ << FF[ACPOFIExtendedFeatures::NamedFloatFeatureIndex::
+ InstructionPerBlock]
+ << " "
+ << FF[ACPOFIExtendedFeatures::NamedFloatFeatureIndex::SuccessorPerBlock]
+ << " " << FF[ACPOFIExtendedFeatures::NamedFloatFeatureIndex::AvgVecInstr]
+ << " "
+ << FF[ACPOFIExtendedFeatures::NamedFloatFeatureIndex::AvgNestedLoopLevel]
+ << " "
+ << FF[ACPOFIExtendedFeatures::NamedFloatFeatureIndex::InstrPerLoop]
+ << " "
+ << FF[ACPOFIExtendedFeatures::NamedFloatFeatureIndex::
+ BlockWithMultipleSuccecorsPerLoop]
+ << " " << FF[ACPOFIExtendedFeatures::NamedFeatureIndex::InitialSize]
+ << " " << FF[ACPOFIExtendedFeatures::NamedFeatureIndex::Blocks] << " "
+ << FF[ACPOFIExtendedFeatures::NamedFeatureIndex::Calls] << " "
+ << FF[ACPOFIExtendedFeatures::NamedFeatureIndex::IsLocal] << " "
+ << FF[ACPOFIExtendedFeatures::NamedFeatureIndex::IsLinkOnceODR] << " "
+ << FF[ACPOFIExtendedFeatures::NamedFeatureIndex::IsLinkOnce] << " "
+ << FF[ACPOFIExtendedFeatures::NamedFeatureIndex::Loops] << " "
+ << FF[ACPOFIExtendedFeatures::NamedFeatureIndex::MaxLoopDepth] << " "
+ << FF[ACPOFIExtendedFeatures::NamedFeatureIndex::MaxDomTreeLevel] << " "
+ << FF[ACPOFIExtendedFeatures::NamedFeatureIndex::PtrArgs] << " "
+ << FF[ACPOFIExtendedFeatures::NamedFeatureIndex::PtrCallee] << " "
+ << FF[ACPOFIExtendedFeatures::NamedFeatureIndex::CallReturnPtr] << " "
+ << FF[ACPOFIExtendedFeatures::NamedFeatureIndex::ConditionalBranch]
+ << " " << FF[ACPOFIExtendedFeatures::NamedFeatureIndex::CBwithArg] << " "
+ << FF[ACPOFIExtendedFeatures::NamedFeatureIndex::CallerHeight] << " "
+ << FF[ACPOFIExtendedFeatures::NamedFeatureIndex::CallUsage] << " "
+ << FF[ACPOFIExtendedFeatures::NamedFeatureIndex::IsRecursive] << " "
+ << FF[ACPOFIExtendedFeatures::NamedFeatureIndex::NumCallsiteInLoop]
+ << " "
+ << FF[ACPOFIExtendedFeatures::NamedFeatureIndex::NumOfCallUsesInLoop]
+ << " " << FF[ACPOFIExtendedFeatures::NamedFeatureIndex::EntryBlockFreq]
+ << " "
+ << FF[ACPOFIExtendedFeatures::NamedFeatureIndex::MaxCallsiteBlockFreq]
+ << " ";
+
+ for (size_t i = 0; i < FF.InstructionHistogram.size(); i++) {
+ OS << FF.InstructionHistogram[i] << " ";
+ }
+ for (size_t i = 0; i < FF.InstructionPairHistogram.size(); i++) {
+ OS << FF.InstructionPairHistogram[i] << " ";
+ }
+ OS << "\n";
+ }
+}
+
+void runAndDump(raw_fd_ostream &OS, Function *F, DominatorTree &DomTree,
+ TargetTransformInfo &TTI, LoopInfo &LI, Module &M,
+ CallHeight *CH, FunctionAnalysisManager *FAM = nullptr) {
+ struct ACPOFIExtendedFeatures::FunctionFeatures FF =
+ ACPOFIExtendedFeatures::getFunctionFeatures(*F, DomTree, TTI, LI, FAM);
+ // Get the call height feature
+ FF[ACPOFIExtendedFeatures::NamedFeatureIndex::CallerHeight] =
+ getCallHeight(M, CH, F);
+ dumpFunctionFeatures(OS, FF, *F, Verbose);
+}
+
+std::unique_ptr<raw_fd_ostream> setUpOS() {
+ // Check ACPO/llvm-project issue #112
+ std::error_code FileErr;
+ std::unique_ptr<raw_fd_ostream> OS(
+ new raw_fd_ostream(OutFile.c_str(), FileErr, llvm::sys::fs::OF_Append));
+
+ if (FileErr) {
+ llvm::errs() << "Error opening info file " << OutFile.c_str() << ": "
+ << FileErr.message() << "\n";
+ return nullptr;
+ }
+
+ if (CheckPairHisto) {
+ dumpInstructionPairs(*OS);
+ return nullptr;
+ }
+
+ return OS;
+}
+
+PreservedAnalyses DumpFeaturePass::run(LazyCallGraph::SCC &C,
+ CGSCCAnalysisManager &AM,
+ LazyCallGraph &CG,
+ CGSCCUpdateResult &UR) {
+ std::unique_ptr<raw_fd_ostream> OS = setUpOS();
+ if (!OS)
+ return PreservedAnalyses::all();
+
+ FunctionAnalysisManager &FAM =
+ AM.getResult<FunctionAnalysisManagerCGSCCProxy>(C, CG).getManager();
+
+ const auto &MAMProxy = AM.getResult<ModuleAnalysisManagerCGSCCProxy>(C, CG);
+ Module &M = *C.begin()->getFunction().getParent();
+ CallHeight *CH = MAMProxy.getCachedResult<CallHeightAnalysis>(M);
+ for (LazyCallGraph::Node &N : C) {
+ Function *F = &N.getFunction();
+ if (F->empty()) {
+ continue;
+ }
+
+ auto &DomTree = FAM.getResult<DominatorTreeAnalysis>(*F);
+ auto &TTI = FAM.getResult<TargetIRAnalysis>(*F);
+ auto &LI = FAM.getResult<LoopAnalysis>(*F);
+
+ runAndDump(*OS, F, DomTree, TTI, LI, M, CH, &FAM);
+ }
+ return PreservedAnalyses::all();
+}
+
+ACPOFIExtendedFeatures::NamedFeatureIndex &
+llvm::operator++(ACPOFIExtendedFeatures::NamedFeatureIndex &n) {
+ return n = static_cast<ACPOFIExtendedFeatures::NamedFeatureIndex>((int)n + 1);
+}
+
+ACPOFIExtendedFeatures::NamedFeatureIndex
+operator++(ACPOFIExtendedFeatures::NamedFeatureIndex &n, int) {
+ ACPOFIExtendedFeatures::NamedFeatureIndex res = n;
+ ++n;
+ return res;
+}
+
+ACPOFIExtendedFeatures::NamedFloatFeatureIndex &
+llvm::operator++(ACPOFIExtendedFeatures::NamedFloatFeatureIndex &n) {
+ return n = static_cast<ACPOFIExtendedFeatures::NamedFloatFeatureIndex>((int)n + 1);
+}
+
+ACPOFIExtendedFeatures::NamedFloatFeatureIndex
+operator++(ACPOFIExtendedFeatures::NamedFloatFeatureIndex &n, int) {
+ ACPOFIExtendedFeatures::NamedFloatFeatureIndex res = n;
+ ++n;
+ return res;
+}
diff --git a/llvm/lib/Analysis/ModelDataCollector.cpp b/llvm/lib/Analysis/ModelDataCollector.cpp
new file mode 100644
index 000000000000..5d599bff25a4
--- /dev/null
+++ b/llvm/lib/Analysis/ModelDataCollector.cpp
@@ -0,0 +1,350 @@
+//===- ModelDataCollector.cpp - Data collector for ML model --------------===//
+//
+// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
+// See https://llvm.org/LICENSE.txt for license information.
+// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
+//
+//===----------------------------------------------------------------------===//
+//
+// This file implements the collection and dumping of data for the ML models
+//
+//===----------------------------------------------------------------------===//
+
+#if defined(ENABLE_ACPO)
+#include "llvm/Analysis/LoopInfo.h"
+#include "llvm/Analysis/ModelDataCollector.h"
+#include "llvm/Demangle/Demangle.h"
+#include "llvm/IR/Function.h"
+#include "llvm/Support/CommandLine.h"
+#include "llvm/Support/FileSystem.h"
+#include "llvm/Support/Path.h"
+
+using namespace llvm;
+
+#define DEBUG_TYPE "model-data-collector"
+
+// Defined in 'lib/IR/AsmWriter.cpp'
+extern cl::opt<std::string> UnnamedVariablePrefix;
+
+static cl::opt<std::string> IRFileDirectory(
+ "IR-file-directory", cl::Hidden,
+ cl::desc("Name of a directory to store IR files."));
+
+cl::opt<std::string>
+ ACPOModelFile("acpo-dump-file", cl::init("-"), cl::Hidden,
+ cl::desc("Name of a file to store feature data in."));
+
+std::string ModelDataCollector::getDumpOptionAsString(DumpOption DO) {
+ switch (DO) {
+ case DumpOption::loop:
+ return "loop";
+ case DumpOption::function:
+ return "function";
+ case DumpOption::before:
+ return "before";
+ case DumpOption::after:
+ return "after";
+ default:
+ return "";
+ }
+}
+
+std::vector<std::pair<std::string, std::string>> ModelDataCollector::getFeatures() {
+ return Features;
+}
+
+StringMap<std::string> ModelDataCollector::getIRFileNameMap() {
+ return IRFileNames;
+}
+
+std::string ModelDataCollector::getOutputFileName() { return OutputFileName; }
+
+bool ModelDataCollector::isEmptyOutputFile() {
+ if (OutputFileName.empty())
+ return false;
+
+ if (!sys::fs::exists(OutputFileName))
+ return true;
+
+ uint64_t Size;
+ std::error_code EC = sys::fs::file_size(OutputFileName, Size);
+ if (EC) {
+ llvm::errs() << "Cannot get file size: " << EC.message() << "\n";
+ assert(false && "Cannot get file size.");
+ }
+
+ if (Size == 0)
+ return true;
+
+ return false;
+}
+
+void ModelDataCollector::collectFeatures(Loop *L, const std::string &ModuleName,
+ const std::string &FuncName, const std::string &LoopName) {
+}
+
+void ModelDataCollector::collectFeatures() {
+ for (auto &FeatureCollectInfo : FeatureCollectInfos) {
+ ACPOCollectFeatures::FeatureValueMap FeatureMap;
+
+ if (FeatureCollectInfo->FeaturesInfo.get()) {
+ FeatureMap = FeatureCollectInfo->FeatureCollector->getFeaturesPair(
+ *FeatureCollectInfo->FeaturesInfo.get());
+ } else if (FeatureCollectInfo->RegisteredScopes.get()) {
+ FeatureCollectInfo->FeatureCollector->setGlobalFeatureInfo(
+ *FeatureCollectInfo->GlobalInfo.get());
+ FeatureMap = FeatureCollectInfo->FeatureCollector->getFeaturesPair(
+ *FeatureCollectInfo->RegisteredScopes.get());
+ } else if (FeatureCollectInfo->RegisteredGroupIDs.get()) {
+ FeatureCollectInfo->FeatureCollector->setGlobalFeatureInfo(
+ *FeatureCollectInfo->GlobalInfo.get());
+ FeatureMap = FeatureCollectInfo->FeatureCollector->getFeaturesPair(
+ *FeatureCollectInfo->RegisteredGroupIDs.get());
+ } else {
+      outs() << "No features were collected because the given "
+                "FeatureCollectInfo is invalid.\n";
+ return;
+ }
+ for (auto const &[key, val] : FeatureMap) {
+ std::string FeatureName;
+
+ if (FeatureCollectInfo->Prefix != "")
+ FeatureName += FeatureCollectInfo->Prefix + "_";
+
+ FeatureName += ACPOCollectFeatures::getFeatureName(key);
+
+ if (FeatureCollectInfo->Postfix != "")
+ FeatureName += "_" + FeatureCollectInfo->Postfix;
+
+ Features.insert(Features.end(), {std::make_pair(FeatureName, val)});
+ }
+ }
+}
+
+void ModelDataCollector::registerFeature(ACPOCollectFeatures::FeaturesInfo Info,
+ std::string Pre, std::string Post) {
+ std::unique_ptr<ModelDataCollector::FeatureCollectInfo> tmp =
+ std::make_unique<ModelDataCollector::FeatureCollectInfo>();
+ tmp->FeaturesInfo.reset(new ACPOCollectFeatures::FeaturesInfo{Info});
+ tmp->FeatureCollector.reset(new ACPOCollectFeatures{});
+ tmp->Prefix = Pre;
+ tmp->Postfix = Post;
+
+ FeatureCollectInfos.push_back(std::move(tmp));
+}
+
+void ModelDataCollector::registerFeature(
+ ACPOCollectFeatures::Scopes ScopeVec,
+ ACPOCollectFeatures::FeatureInfo GlobalInfo, std::string Pre,
+ std::string Post) {
+ std::unique_ptr<ModelDataCollector::FeatureCollectInfo> tmp =
+ std::make_unique<ModelDataCollector::FeatureCollectInfo>();
+ tmp->RegisteredScopes.reset(new ACPOCollectFeatures::Scopes{ScopeVec});
+ tmp->FeatureCollector.reset(new ACPOCollectFeatures{});
+ tmp->GlobalInfo.reset(new ACPOCollectFeatures::FeatureInfo{GlobalInfo});
+ tmp->Prefix = Pre;
+ tmp->Postfix = Post;
+
+ FeatureCollectInfos.push_back(std::move(tmp));
+}
+
+void ModelDataCollector::registerFeature(
+ ACPOCollectFeatures::GroupIDs GroupIDVec,
+ ACPOCollectFeatures::FeatureInfo GlobalInfo, std::string Pre,
+ std::string Post) {
+ std::unique_ptr<ModelDataCollector::FeatureCollectInfo> tmp =
+ std::make_unique<ModelDataCollector::FeatureCollectInfo>();
+ tmp->RegisteredGroupIDs.reset(new ACPOCollectFeatures::GroupIDs{GroupIDVec});
+ tmp->FeatureCollector.reset(new ACPOCollectFeatures{});
+ tmp->GlobalInfo.reset(new ACPOCollectFeatures::FeatureInfo{GlobalInfo});
+ tmp->Prefix = Pre;
+ tmp->Postfix = Post;
+
+ FeatureCollectInfos.push_back(std::move(tmp));
+}
+
+void ModelDataCollector::resetRegisteredFeatures() {
+ FeatureCollectInfos.clear();
+ Features.clear();
+}
+
+std::string ModelDataCollector::demangleName(const std::string &Name) {
+ ItaniumPartialDemangler D;
+ if (!D.partialDemangle(Name.c_str()))
+ return D.getFunctionBaseName(nullptr, nullptr);
+
+ return Name;
+}
+
+void ModelDataCollector::setFeatures(
+ std::vector<std::pair<std::string, std::string>> NewFeatures) {
+ Features = NewFeatures;
+}
+
+void ModelDataCollector::addFeatures(
+ std::vector<std::pair<std::string, std::string>> NewFeatures) {
+ Features.insert(Features.end(), NewFeatures.begin(), NewFeatures.end());
+}
+
+void ModelDataCollector::setIRFileNameMap(StringMap<std::string> IRFileNameMap) {
+ IRFileNames = IRFileNameMap;
+}
+
+void ModelDataCollector::printRow(bool printHeader) {
+ // Print the IR file names first
+ for (const auto &P : IRFileNames) {
+ if (printHeader)
+ Out << P.getKey();
+ else
+ Out << P.getValue();
+
+ Out << ",";
+ }
+
+ for (unsigned I = 0, E = Features.size(); I != E; ++I ) {
+ // First value does not get a comma
+ if (I)
+ Out << ",";
+
+ if (printHeader)
+ Out << Features.at(I).first;
+ else
+ Out << Features.at(I).second;
+ }
+
+ Out << "\n";
+}
+
+/*std::string ModelDataCollector::generateIRFileName(autotuning::CodeRegion CR) {
+ // File name = source_location + pass_name + coderegion_type + hash,
+ // where source_location = file_name + func_name + loop_name
+ // + line_number + column_number
+ std::string IRFileName =
+ sys::path::filename(StringRef(CR.getFileName())).str() + "_"
+ + demangleName(CR.getFuncName()) + "_"
+ + CR.getName() + "_"
+ + std::to_string(CR.getSourceLoc().SourceLine) + "_"
+ + std::to_string(CR.getSourceLoc().SourceColumn) + "_"
+ + CR.getPassName() + "_"
+ + CR.getTypeAsString() + "_"
+ + std::to_string(CR.getHash()) + ".ll";
+ return IRFileName;
+}*/
+
+std::string ModelDataCollector::getIRFileName(StringRef Key) {
+ if (IRFileNames.count(Key))
+ return IRFileNames.find(Key)->second;
+
+ return "None";
+}
+
+std::unique_ptr<raw_ostream>
+ModelDataCollector::createFile(const Twine &FilePath,
+ const Twine &FileName,
+ std::error_code &EC) {
+ if (std::error_code EC = sys::fs::create_directories(FilePath))
+ errs() << "Error creating directory: " << FilePath << ": "
+ << EC.message() << "\n";
+
+ return std::make_unique<raw_fd_ostream>((FilePath + "/" + FileName).str(), EC);
+}
+
+void ModelDataCollector::createIRFileForLoop(Loop *L, const Twine &IRFilePath,
+ const Twine &IRFileName,
+ bool OverwriteIRFile) {
+ if (!OverwriteIRFile && sys::fs::exists(IRFilePath + "/" + IRFileName))
+ return;
+
+ // Write IR to file
+ std::error_code EC;
+ auto OS = createFile(IRFilePath, Twine(IRFileName), EC);
+ if (EC) {
+ errs() << "Error creating loop IR file: " << IRFileName << ": "
+ << EC.message() << "\n";
+ return;
+ }
+
+ // Print loop wrapped in function if -unnamed-var-prefix is set by user
+ if (UnnamedVariablePrefix.getNumOccurrences() > 0) {
+ SmallVector<BasicBlock *, 8> ExitBlocks;
+ L->getExitBlocks(ExitBlocks);
+ // May need to move this code out of Loop data structure in LLVM. Will see.
+ L->printWithFunctionWrapper(*OS, L->getHeader()->getParent(),
+ L->getBlocks(), L->getHeader(), ExitBlocks,
+ /*AAW*/ nullptr,
+ /*ShouldPreserveUseListOrder*/ false,
+ /*IsForDebug*/ false);
+ } else {
+ L->print(*OS, /*Depth*/ 0, /*Verbose*/ true);
+ }
+}
+
+void ModelDataCollector::createIRFileForFunction(Function *F,
+ const Twine &IRFilePath,
+ const Twine &IRFileName,
+ bool OverwriteIRFile) {
+ if (!OverwriteIRFile && sys::fs::exists(IRFilePath + "/" + IRFileName))
+ return;
+
+ // Write IR to file
+ std::error_code EC;
+ auto OS = createFile(IRFilePath, Twine(IRFileName), EC);
+ if (EC) {
+ errs() << "Error creating function IR file: " << IRFileName << ": "
+ << EC.message() << "\n";
+ return;
+ }
+ // May need to investigate this print function change.
+ F->print(*OS, /*AAW*/ nullptr, /*ShouldPreserveUseListOrder*/ false,
+ /*IsForDebug*/ false);
+}
+
+void ModelDataCollector::writeIR(Loop *L, Function *F,
+ std::string NewIRFileName,
+ std::string PassName,
+ DumpOption DumpBeforeOrAfter, bool PrintLoop,
+ bool PrintFunction, bool OverwriteIRFile) {
+ // Create base directory first
+ SmallString<256> IRFilePath;
+ if (IRFileDirectory.getNumOccurrences() > 0) {
+    // Use the directory specified by
+    // the -IR-file-directory option.
+ Twine BaseDir(IRFileDirectory);
+ BaseDir.toVector(IRFilePath);
+ } else {
+ // No directory specified
+ return;
+ }
+
+ if (getDumpOptionAsString(DumpBeforeOrAfter).empty())
+ return;
+
+ // Create sub-directories to store corresponding IR files.
+ // Directory name = before/after + pass_name + coderegion_type
+ std::string SubDir = getDumpOptionAsString(DumpBeforeOrAfter)
+ + "_" + PassName;
+ if (L && PrintLoop) {
+ createIRFileForLoop(L,
+ Twine(IRFilePath) + "/" + SubDir + "_" +
+ getDumpOptionAsString(DumpOption::loop),
+ Twine(NewIRFileName), OverwriteIRFile);
+ // Add IR file name for summary data file
+ IRFileNames.insert(std::pair<std::string, std::string> (
+ getDumpOptionAsString(DumpBeforeOrAfter)
+ + getDumpOptionAsString(DumpOption::loop),
+ NewIRFileName));
+ }
+
+ if (F && PrintFunction) {
+ createIRFileForFunction(F,
+ Twine(IRFilePath) + "/" + SubDir + "_" +
+ getDumpOptionAsString(DumpOption::function),
+ Twine(NewIRFileName), OverwriteIRFile);
+ // Add IR file name for summary data file
+ IRFileNames.insert(std::pair<std::string, std::string> (
+ getDumpOptionAsString(DumpBeforeOrAfter)
+ + getDumpOptionAsString(DumpOption::function),
+ NewIRFileName));
+ }
+}
+#endif // ENABLE_ACPO
diff --git a/llvm/lib/CodeGen/CMakeLists.txt b/llvm/lib/CodeGen/CMakeLists.txt
index 9029dc7bb3d9..579074408b55 100644
--- a/llvm/lib/CodeGen/CMakeLists.txt
+++ b/llvm/lib/CodeGen/CMakeLists.txt
@@ -1,4 +1,4 @@
-if (DEFINED LLVM_HAVE_TF_AOT OR LLVM_HAVE_TFLITE)
+if ((DEFINED LLVM_HAVE_TF_AOT OR DEFINED LLVM_HAVE_TF_API) AND (NOT ACPO_AOT))
include(TensorFlowCompile)
set(LLVM_RAEVICT_MODEL_PATH_DEFAULT "models/regalloc-eviction")
diff --git a/llvm/lib/IR/AsmWriter.cpp b/llvm/lib/IR/AsmWriter.cpp
index af77e6c2dc4d..a02c603a14a5 100644
--- a/llvm/lib/IR/AsmWriter.cpp
+++ b/llvm/lib/IR/AsmWriter.cpp
@@ -86,8 +86,16 @@
#include <utility>
#include <vector>
+#include "llvm/ADT/StringSet.h"
+#include "llvm/Analysis/LoopInfo.h"
+#include "llvm/Support/CommandLine.h"
+
using namespace llvm;
+cl::opt<std::string> UnnamedVariablePrefix(
+ "unnamed-var-prefix", cl::Hidden,
+ cl::desc("Specify the prefix added to unnamed variables"), cl::init(""));
+
// Make virtual table appear in this compilation unit.
AssemblyAnnotationWriter::~AssemblyAnnotationWriter() = default;
@@ -2487,9 +2495,11 @@ static void WriteAsOperandInternal(raw_ostream &Out, const Value *V,
Slot = -1;
}
- if (Slot != -1)
- Out << Prefix << Slot;
- else
+ if (Slot != -1) {
+ // By default, UnnamedVariablePrefix is empty, so the output matches the
+ // original behaviour unless the option is set.
+ Out << Prefix << UnnamedVariablePrefix << Slot;
+ } else
Out << "<badref>";
}
@@ -2602,12 +2612,13 @@ public:
void writeAllAttributeGroups();
void printTypeIdentities();
-#if defined(ENABLE_AUTOTUNER)
+#if defined(ENABLE_AUTOTUNER) || defined(ENABLE_ACPO)
void printGlobal(const GlobalVariable *GV, bool PrintDeclarationOnly = false);
void printAlias(const GlobalAlias *GA);
void printIFunc(const GlobalIFunc *GI);
void printComdat(const Comdat *C);
- void printRequisiteDeclarations(const Function *F);
+ void printRequisiteDeclarations(const Function *F,
+ std::vector<BasicBlock *> LoopBlocks = {});
void printFunction(const Function *F, bool PrintCompleteIR = false,
bool PrintDeclarationOnly = false);
#else
@@ -2616,9 +2627,15 @@ public:
void printIFunc(const GlobalIFunc *GI);
void printComdat(const Comdat *C);
void printFunction(const Function *F);
+#endif
+#if defined(ENABLE_ACPO)
+ void printLoopWithFunctionWrapper(Function *F,
+ std::vector<BasicBlock *> LoopBlocks,
+ BasicBlock *Header,
+ SmallVector<BasicBlock *, 8> ExitBlocks);
#endif
void printArgument(const Argument *FA, AttributeSet Attrs);
- void printBasicBlock(const BasicBlock *BB);
+ void printBasicBlock(const BasicBlock *BB, bool PrintLabelOnly = false);
void printInstructionLine(const Instruction &I);
void printInstruction(const Instruction &I);
@@ -3603,7 +3620,7 @@ static void maybePrintComdat(formatted_raw_ostream &Out,
Out << ')';
}
-#if defined(ENABLE_AUTOTUNER)
+#if defined(ENABLE_AUTOTUNER) || defined(ENABLE_ACPO)
void AssemblyWriter::printGlobal(const GlobalVariable *GV,
bool PrintDeclarationOnly) {
if (GV->isMaterializable() && !PrintDeclarationOnly)
@@ -3617,7 +3634,7 @@ void AssemblyWriter::printGlobal(const GlobalVariable *GV) {
WriteAsOperandInternal(Out, GV, WriterCtx);
Out << " = ";
-#if defined(ENABLE_AUTOTUNER)
+#if defined(ENABLE_AUTOTUNER) || defined(ENABLE_ACPO)
if ((!GV->hasInitializer() || PrintDeclarationOnly) &&
GV->hasExternalLinkage())
#else
@@ -3640,7 +3657,7 @@ void AssemblyWriter::printGlobal(const GlobalVariable *GV) {
Out << (GV->isConstant() ? "constant " : "global ");
TypePrinter.print(GV->getValueType(), Out);
-#if defined(ENABLE_AUTOTUNER)
+#if defined(ENABLE_AUTOTUNER) || defined(ENABLE_ACPO)
if (GV->hasInitializer() && !PrintDeclarationOnly) {
#else
if (GV->hasInitializer()) {
@@ -3794,21 +3811,34 @@ void AssemblyWriter::printTypeIdentities() {
}
}
-#if defined(ENABLE_AUTOTUNER)
+#if defined(ENABLE_AUTOTUNER) || defined(ENABLE_ACPO)
/// printRequisiteDeclarations - Print the declarations of type identities,
/// global variables, functions, and function attribute groups of a function.
-void AssemblyWriter::printRequisiteDeclarations(const Function *F) {
+void AssemblyWriter::printRequisiteDeclarations(
+ const Function *F, std::vector<BasicBlock *> LoopBlocks) {
// walk through instructions and collect global variables & functions
SmallPtrSet<GlobalVariable *, 8> GVs;
SmallPtrSet<Function *, 8> Functions;
- for (const BasicBlock &BB : *F) {
- for (const Instruction &I : BB) {
+ std::vector<BasicBlock *> BasicBlocks;
+ if (!LoopBlocks.empty()) {
+ for (BasicBlock *BB : LoopBlocks)
+ BasicBlocks.push_back(BB);
+ } else {
+ for (const BasicBlock &BB : *F)
+ BasicBlocks.push_back(const_cast<BasicBlock *>(&BB));
+ }
+
+ for (const BasicBlock *BB : BasicBlocks) {
+ for (const Instruction &I : *BB) {
// Check for function
if (const auto *CI = dyn_cast<CallInst>(&I)) {
Function *func = CI->getCalledFunction();
if (func)
Functions.insert(func);
}
+ if (const InvokeInst *II = dyn_cast<InvokeInst>(&I))
+ if (Function *func = dyn_cast<Function>(II->getCalledOperand()))
+ Functions.insert(func);
// Check for global variables
for (const Use &U : I.operands()) {
if (GlobalVariable *gv = dyn_cast<GlobalVariable>(U))
@@ -3823,6 +3853,16 @@ void AssemblyWriter::printRequisiteDeclarations(const Function *F) {
GVs.insert(gv);
}
}
+ // Check for ConstantExpr BitCast
+ if (const auto *CstExpr = dyn_cast<ConstantExpr>(U))
+ if (CstExpr->isCast())
+ for (const Use &UU : CstExpr->operands()) {
+ if (GlobalVariable *gv = dyn_cast<GlobalVariable>(UU))
+ GVs.insert(gv);
+ else if (const Function *func =
+ dyn_cast<Function>(CstExpr->stripPointerCasts()))
+ Functions.insert(const_cast<Function *>(func));
+ }
}
}
}
@@ -3842,7 +3882,7 @@ void AssemblyWriter::printRequisiteDeclarations(const Function *F) {
// modify property if needed
if (!(*GVit)->hasAvailableExternallyLinkage() &&
!((*GVit)->getName() == "llvm.global_ctors") &&
- (*GVit)->hasLocalLinkage()) {
+ ((*GVit)->hasLocalLinkage() || (*GVit)->hasCommonLinkage())) {
(*GVit)->setLinkage(GlobalValue::ExternalLinkage);
(*GVit)->setVisibility(GlobalValue::HiddenVisibility);
}
@@ -3860,8 +3900,14 @@ void AssemblyWriter::printRequisiteDeclarations(const Function *F) {
// print functions
for (auto FuncIt = Functions.begin(), et = Functions.end(); FuncIt != et;
++FuncIt) {
+ if (!LoopBlocks.empty() && *FuncIt == F)
+ continue;
Out << '\n';
+ GlobalValue::LinkageTypes SavedLinkage = (*FuncIt)->getLinkage();
+ // Function declarations can only have external or extern_weak linkage
+ (*FuncIt)->setLinkage(GlobalValue::ExternalLinkage);
printFunction(*FuncIt, false, true);
+ (*FuncIt)->setLinkage(SavedLinkage);
}
// Write attribute groups.
@@ -3873,7 +3919,8 @@ void AssemblyWriter::printRequisiteDeclarations(const Function *F) {
}
/// printFunction - Print all aspects of a function.
-void AssemblyWriter::printFunction(const Function *F, bool PrintCompleteIR,
+void AssemblyWriter::printFunction(const Function *F,
+ bool PrintCompleteIR,
bool PrintDeclarationOnly) {
if (PrintCompleteIR && !PrintDeclarationOnly) {
printRequisiteDeclarations(F);
@@ -3887,6 +3934,9 @@ void AssemblyWriter::printFunction(const Function *F, bool PrintCompleteIR,
void AssemblyWriter::printFunction(const Function *F) {
if (AnnotationWriter) AnnotationWriter->emitFunctionAnnot(F, Out);
+ if (AnnotationWriter)
+ AnnotationWriter->emitFunctionAnnot(F, Out);
+
if (F->isMaterializable())
Out << "; Materializable\n";
#endif
@@ -3907,7 +3957,7 @@ void AssemblyWriter::printFunction(const Function *F) {
Out << "; Function Attrs: " << AttrStr << '\n';
}
-#if defined(ENABLE_AUTOTUNER)
+#if defined(ENABLE_AUTOTUNER) || defined(ENABLE_ACPO)
if (!PrintDeclarationOnly)
Machine.incorporateFunction(F);
@@ -3952,7 +4002,7 @@ void AssemblyWriter::printFunction(const Function *F) {
Out << '(';
// Loop over the arguments, printing them...
-#if defined(ENABLE_AUTOTUNER)
+#if defined(ENABLE_AUTOTUNER) || defined(ENABLE_ACPO)
if ((F->isDeclaration() && !IsForDebug) || PrintDeclarationOnly) {
#else
if (F->isDeclaration() && !IsForDebug) {
@@ -4027,7 +4077,7 @@ void AssemblyWriter::printFunction(const Function *F) {
writeOperand(F->getPersonalityFn(), /*PrintType=*/true);
}
-#if defined(ENABLE_AUTOTUNER)
+#if defined(ENABLE_AUTOTUNER) || defined(ENABLE_ACPO)
if (F->isDeclaration() || PrintDeclarationOnly) {
#else
if (F->isDeclaration()) {
@@ -4049,16 +4099,102 @@ void AssemblyWriter::printFunction(const Function *F) {
Out << "}\n";
}
-#if defined(ENABLE_AUTOTUNER)
+#if defined(ENABLE_AUTOTUNER) || defined(ENABLE_ACPO)
// Output metadata
if (!Machine.mdn_empty() && PrintCompleteIR && !PrintDeclarationOnly) {
Out << '\n';
writeAllMDNodes();
}
-#endif
+ if (!PrintDeclarationOnly)
+ Machine.purgeFunction();
+#else
Machine.purgeFunction();
+#endif
}
+#if defined(ENABLE_ACPO)
+/// printLoopWithFunctionWrapper - print out a loop wrapped in a dummy
+/// function. All global/local variables, functions and metadata that
+/// are referenced inside the loop are printed out. Loop predecessors
+/// and loop exit blocks are also included.
+void AssemblyWriter::printLoopWithFunctionWrapper(
+ Function *F, std::vector<BasicBlock *> LoopBlocks, BasicBlock *Header,
+ SmallVector<BasicBlock *, 8> ExitBlocks) {
+ printRequisiteDeclarations(F, LoopBlocks);
+
+ // Output the dummy function
+ bool IsFirstArgument = true;
+ Out << "define void ";
+ std::string FunctionName = "foo";
+ PrintLLVMName(Out, FunctionName, GlobalPrefix);
+ Out << '(';
+ for (const Argument &Arg : F->args()) {
+ if (IsFirstArgument) // Separate arguments with commas after the first
+ IsFirstArgument = false;
+ else
+ Out << ", ";
+ writeOperand(&Arg, /*PrintType=*/true);
+ }
+
+ // All local variables that are referenced in this loop but not defined in it
+ // are printed next in the argument list
+ SmallPtrSet<const Instruction *, 32> AddedVariables;
+ for (const BasicBlock *BB : LoopBlocks)
+ for (const Instruction &I : *BB)
+ for (unsigned i = 0, e = I.getNumOperands(); i != e; ++i) {
+ Value *Op = I.getOperand(i);
+ // Print out the operand in the function argument list
+ // if it is an instruction that is not contained in this loop
+ if (const Instruction *II = dyn_cast_or_null<Instruction>(Op))
+ if (!AddedVariables.contains(II) &&
+ std::find(LoopBlocks.begin(), LoopBlocks.end(),
+ II->getParent()) == LoopBlocks.end()) {
+ AddedVariables.insert(II);
+ if (IsFirstArgument)
+ IsFirstArgument = false;
+ else
+ Out << ", ";
+
+ writeOperand(Op, true);
+ }
+ }
+
+ Out << ") {\n";
+
+ // Output loop predecessors
+ // Each predecessor only needs to have an unconditional 'br' instruction
+ // that branches to the loop header
+ for (const BasicBlock *Pred : children<Inverse<BasicBlock *>>(Header))
+ // If the block is not in the loop
+ if (std::find(LoopBlocks.begin(), LoopBlocks.end(), Pred) ==
+ LoopBlocks.end()) {
+ printBasicBlock(Pred, true);
+ Out << " br label %";
+ PrintLLVMName(Out, Header->getName(), LabelPrefix);
+ Out << "\n";
+ }
+
+ // Output all of the loop's basic blocks
+ for (const BasicBlock *BB : LoopBlocks)
+ printBasicBlock(BB);
+
+ // Output loop exit blocks
+ // Each exit block only needs a 'ret' instruction
+ for (const BasicBlock *Succ : ExitBlocks) {
+ printBasicBlock(Succ, true);
+ Out << " ret void\n";
+ }
+
+ Out << "}\n";
+
+ // Output metadata
+ if (!Machine.mdn_empty()) {
+ Out << '\n';
+ writeAllMDNodes();
+ }
+}
+#endif
+
/// printArgument - This member is called for every argument that is passed into
/// the function. Simply print it out
void AssemblyWriter::printArgument(const Argument *Arg, AttributeSet Attrs) {
@@ -4078,13 +4214,17 @@ void AssemblyWriter::printArgument(const Argument *Arg, AttributeSet Attrs) {
} else {
int Slot = Machine.getLocalSlot(Arg);
assert(Slot != -1 && "expect argument in function here");
- Out << " %" << Slot;
+ // By default, UnnamedVariablePrefix is empty, so the output matches the
+ // original behaviour unless the option is set.
+ Out << " %" << UnnamedVariablePrefix << Slot;
}
}
/// printBasicBlock - This member is called for each basic block in a method.
-void AssemblyWriter::printBasicBlock(const BasicBlock *BB) {
- bool IsEntryBlock = BB->getParent() && BB->isEntryBlock();
+void AssemblyWriter::printBasicBlock(const BasicBlock *BB,
+ bool PrintLabelOnly) {
+ assert(BB && BB->getParent() && "block without parent!");
+ bool IsEntryBlock = BB == &BB->getParent()->getEntryBlock();
if (BB->hasName()) { // Print out the label if it exists...
Out << "\n";
PrintLLVMName(Out, BB->getName(), LabelPrefix);
@@ -4092,12 +4232,19 @@ void AssemblyWriter::printBasicBlock(const BasicBlock *BB) {
} else if (!IsEntryBlock) {
Out << "\n";
int Slot = Machine.getLocalSlot(BB);
- if (Slot != -1)
- Out << Slot << ":";
- else
+ if (Slot != -1) {
+ // By default, UnnamedVariablePrefix is empty, so the output matches the
+ // original behaviour unless the option is set.
+ Out << UnnamedVariablePrefix << Slot << ":";
+ } else
Out << "<badref>:";
}
+ if (PrintLabelOnly) {
+ Out << "\n";
+ return;
+ }
+
if (!IsEntryBlock) {
// Output predecessors for the block.
Out.PadToColumn(50);
@@ -4191,8 +4338,11 @@ void AssemblyWriter::printInstruction(const Instruction &I) {
int SlotNum = Machine.getLocalSlot(&I);
if (SlotNum == -1)
Out << "<badref> = ";
- else
- Out << '%' << SlotNum << " = ";
+ else {
+ // By default, UnnamedVariablePrefix is empty, so the output matches the
+ // original behaviour unless the option is set.
+ Out << '%' << UnnamedVariablePrefix << SlotNum << " = ";
+ }
}
if (const CallInst *CI = dyn_cast<CallInst>(&I)) {
@@ -4762,6 +4912,20 @@ void BasicBlock::print(raw_ostream &ROS, AssemblyAnnotationWriter *AAW,
W.printBasicBlock(this);
}
+#if defined(ENABLE_ACPO)
+void Loop::printWithFunctionWrapper(
+ raw_ostream &ROS, Function *F, ArrayRef<BasicBlock *> LoopBlocks,
+ BasicBlock *Header, SmallVector<BasicBlock *, 8> ExitBlocks,
+ AssemblyAnnotationWriter *AAW, bool ShouldPreserveUseListOrder,
+ bool IsForDebug) const {
+ SlotTracker SlotTable(F);
+ formatted_raw_ostream OS(ROS);
+ AssemblyWriter W(OS, SlotTable, F->getParent(), AAW, IsForDebug,
+ ShouldPreserveUseListOrder);
+ W.printLoopWithFunctionWrapper(F, LoopBlocks, Header, ExitBlocks);
+}
+#endif
+
void Module::print(raw_ostream &ROS, AssemblyAnnotationWriter *AAW,
bool ShouldPreserveUseListOrder, bool IsForDebug) const {
SlotTracker SlotTable(this);
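The new Loop::printWithFunctionWrapper() entry point is what ModelDataCollector::createIRFileForLoop() calls above. The following is a minimal sketch of driving it from a standalone tool, assuming an ENABLE_ACPO build of LLVM; the input file name and tool name are placeholders, and the call pattern simply mirrors createIRFileForLoop().

#include "llvm/ADT/SmallVector.h"
#include "llvm/Analysis/LoopInfo.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/LLVMContext.h"
#include "llvm/IR/Module.h"
#include "llvm/IRReader/IRReader.h"
#include "llvm/Support/SourceMgr.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

int main() {
  LLVMContext Ctx;
  SMDiagnostic Err;
  // "input.ll" is a placeholder path chosen for this sketch.
  std::unique_ptr<Module> M = parseIRFile("input.ll", Err, Ctx);
  if (!M) {
    Err.print("loop-wrapper-sketch", errs());
    return 1;
  }

  for (Function &F : *M) {
    if (F.isDeclaration())
      continue;
    DominatorTree DT(F);
    LoopInfo LI(DT);
    for (Loop *L : LI) { // top-level loops only
      SmallVector<BasicBlock *, 8> ExitBlocks;
      L->getExitBlocks(ExitBlocks);
      // Same argument pattern as ModelDataCollector::createIRFileForLoop().
      L->printWithFunctionWrapper(outs(), L->getHeader()->getParent(),
                                  L->getBlocks(), L->getHeader(), ExitBlocks,
                                  /*AAW=*/nullptr,
                                  /*ShouldPreserveUseListOrder=*/false,
                                  /*IsForDebug=*/false);
    }
  }
  return 0;
}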
diff --git a/llvm/lib/Passes/PassBuilder.cpp b/llvm/lib/Passes/PassBuilder.cpp
index a3ccbc6d258f..e2fe3322aef4 100644
--- a/llvm/lib/Passes/PassBuilder.cpp
+++ b/llvm/lib/Passes/PassBuilder.cpp
@@ -267,6 +267,12 @@
#include "llvm/Transforms/Scalar/AutoTuningCompile.h"
#endif
+#if defined(ENABLE_ACPO)
+#include "llvm/Analysis/CallHeight.h"
+#include "llvm/Analysis/DumpCallsite.h"
+#include "llvm/Analysis/DumpFeature.h"
+#endif
+
using namespace llvm;
static const Regex DefaultAliasRegex(
diff --git a/llvm/lib/Passes/PassBuilderPipelines.cpp b/llvm/lib/Passes/PassBuilderPipelines.cpp
index 8009e011833c..de89f5393ba2 100644
--- a/llvm/lib/Passes/PassBuilderPipelines.cpp
+++ b/llvm/lib/Passes/PassBuilderPipelines.cpp
@@ -138,6 +138,12 @@
#include "llvm/Transforms/Scalar/AutoTuningCompile.h"
#endif
+#if defined(ENABLE_ACPO)
+#include "llvm/Analysis/CallHeight.h"
+#include "llvm/Analysis/DumpCallsite.h"
+#include "llvm/Analysis/DumpFeature.h"
+#endif
+
using namespace llvm;
static cl::opt<InliningAdvisorMode> UseInlineAdvisor(
@@ -894,6 +900,14 @@ PassBuilder::buildInlinerPipeline(OptimizationLevel Level,
// make a lot of sense and we should revisit the core CGSCC structure.
CGSCCPassManager &MainCGPipeline = MIWP.getPM();
+#if defined(ENABLE_ACPO)
+ if (EnableFeatureDump) {
+ // Add CallHeight analysis for dump feature
+ MIWP.addModulePass(RequireAnalysisPass<CallHeightAnalysis, Module>());
+ MainCGPipeline.addPass(DumpFeaturePass());
+ }
+#endif
+
// Note: historically, the PruneEH pass was run first to deduce nounwind and
// generally clean up exception handling overhead. It isn't clear this is
// valuable as the inliner doesn't currently care whether it is inlining an
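The EnableFeatureDump wiring above can also be expressed as a standalone module pipeline. The sketch below is a hedged illustration based only on this hunk (CallHeightAnalysis required once per module, DumpFeaturePass run over every SCC); it assumes an ENABLE_ACPO build, and the usual PassBuilder analysis-manager registration is omitted.

#include "llvm/Analysis/CGSCCPassManager.h"
#include "llvm/Analysis/CallHeight.h"
#include "llvm/Analysis/DumpFeature.h"
#include "llvm/IR/PassManager.h"

using namespace llvm;

// Mirrors the EnableFeatureDump branch of buildInlinerPipeline():
// require CallHeightAnalysis at module scope, then run DumpFeaturePass
// on each SCC in post order.
ModulePassManager buildFeatureDumpPipeline() {
  ModulePassManager MPM;
  MPM.addPass(RequireAnalysisPass<CallHeightAnalysis, Module>());

  CGSCCPassManager CGPM;
  CGPM.addPass(DumpFeaturePass());
  MPM.addPass(createModuleToPostOrderCGSCCPassAdaptor(std::move(CGPM)));
  return MPM;
}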
diff --git a/llvm/lib/Passes/PassRegistry.def b/llvm/lib/Passes/PassRegistry.def
index 45a539f14b93..6ef0d6791ff2 100644
--- a/llvm/lib/Passes/PassRegistry.def
+++ b/llvm/lib/Passes/PassRegistry.def
@@ -33,6 +33,10 @@ MODULE_ANALYSIS("ir-similarity", IRSimilarityAnalysis())
MODULE_ANALYSIS("autotuning-dump", AutotuningDumpAnalysis())
#endif
+#if defined(ENABLE_ACPO)
+MODULE_ANALYSIS("call-height", CallHeightAnalysis())
+#endif
+
#ifndef MODULE_ALIAS_ANALYSIS
#define MODULE_ALIAS_ANALYSIS(NAME, CREATE_PASS) \
MODULE_ANALYSIS(NAME, CREATE_PASS)
@@ -215,6 +219,9 @@ CGSCC_PASS("invalidate<all>", InvalidateAllAnalysesPass())
CGSCC_PASS("attributor-cgscc", AttributorCGSCCPass())
CGSCC_PASS("openmp-opt-cgscc", OpenMPOptCGSCCPass())
CGSCC_PASS("no-op-cgscc", NoOpCGSCCPass())
+#if defined(ENABLE_ACPO)
+CGSCC_PASS("dump-feature", DumpFeaturePass())
+#endif
#undef CGSCC_PASS
#ifndef CGSCC_PASS_WITH_PARAMS
@@ -325,6 +332,9 @@ FUNCTION_PASS("view-dom", DomViewer())
FUNCTION_PASS("view-dom-only", DomOnlyViewer())
FUNCTION_PASS("view-post-dom", PostDomViewer())
FUNCTION_PASS("view-post-dom-only", PostDomOnlyViewer())
+#if defined(ENABLE_ACPO)
+FUNCTION_PASS("dump-callsite", DumpCallsitePass())
+#endif
FUNCTION_PASS("fix-irreducible", FixIrreduciblePass())
FUNCTION_PASS("flattencfg", FlattenCFGPass())
FUNCTION_PASS("make-guards-explicit", MakeGuardsExplicitPass())
--
2.38.1.windows.1