gcc-avr32.patch 846 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
73278327932803281328232833284328532863287328832893290329132923293329432953296329732983299330033013302330333043305330633073308330933103311331233133314331533163317331833193320332133223323332433253326332733283329333033313332333333343335333633373338333933403341334233433344334533463347334833493350335133523353335433553356335733583359336033613362336333643365336633673368336933703371337233733374337533763377337833793380338133823383338433853386338733883389339033913392339333943395339633973398339934003401340234033404340534063407340834093410341134123413341434153416341734183419342034213422342334243425342634273428342934303431343234333434343534363437343834393440344134423443344434453446344734483449345034513452345334543455345634573458345934603461346234633464346534663467346834693470347134723473347434753476347734783479348034813482348334843485348634873488348934903491349234933494349534963497349834993500350135023503350435053506350735083509351035113512351335143515351635173518351935203521352235233524352535263527352835293530353135323533353435353536353735383539354035413542354335443545354635473548354935503551355235533554355535563557355835593560356135623563356435653566356735683569357035713572357335743575357635773578357935803581358235833584358535863587358835893590359135923593359435953596359735983599360036013602360336043605360636073608360936103611361236133614361536163617361836193620362136223623362436253626362736283629363036313632363336343635363636373638363936403641364236433644364536463647364836493650365136523653365436553656365736583659366036613662366336643665366636673668366936703671367236733674367536763677367836793680368136823683368436853686368736883689369036913692369336943695369636973698369937003701370237033704370537063707370837093710371137123713371437153716371737183719372037213722372337243725372637273728372937303731373237333734373537363737373837393740374137423743374437453746374737483749375037513752375337543755375637573758375937603761376237633764376537663767376837693770377137723773377437753776377
73778377937803781378237833784378537863787378837893790379137923793379437953796379737983799380038013802380338043805380638073808380938103811381238133814381538163817381838193820382138223823382438253826382738283829383038313832383338343835383638373838383938403841384238433844384538463847384838493850385138523853385438553856385738583859386038613862386338643865386638673868386938703871387238733874387538763877387838793880388138823883388438853886388738883889389038913892389338943895389638973898389939003901390239033904390539063907390839093910391139123913391439153916391739183919392039213922392339243925392639273928392939303931393239333934393539363937393839393940394139423943394439453946394739483949395039513952395339543955395639573958395939603961396239633964396539663967396839693970397139723973397439753976397739783979398039813982398339843985398639873988398939903991399239933994399539963997399839994000400140024003400440054006400740084009401040114012401340144015401640174018401940204021402240234024402540264027402840294030403140324033403440354036403740384039404040414042404340444045404640474048404940504051405240534054405540564057405840594060406140624063406440654066406740684069407040714072407340744075407640774078407940804081408240834084408540864087408840894090409140924093409440954096409740984099410041014102410341044105410641074108410941104111411241134114411541164117411841194120412141224123412441254126412741284129413041314132413341344135413641374138413941404141414241434144414541464147414841494150415141524153415441554156415741584159416041614162416341644165416641674168416941704171417241734174417541764177417841794180418141824183418441854186418741884189419041914192419341944195419641974198419942004201420242034204420542064207420842094210421142124213421442154216421742184219422042214222422342244225422642274228422942304231423242334234423542364237423842394240424142424243424442454246424742484249425042514252425342544255425642574258425942604261426242634264426542664267426842694270427142724273427442754276427
74278427942804281428242834284428542864287428842894290429142924293429442954296429742984299430043014302430343044305430643074308430943104311431243134314431543164317431843194320432143224323432443254326432743284329433043314332433343344335433643374338433943404341434243434344434543464347434843494350435143524353435443554356435743584359436043614362436343644365436643674368436943704371437243734374437543764377437843794380438143824383438443854386438743884389439043914392439343944395439643974398439944004401440244034404440544064407440844094410441144124413441444154416441744184419442044214422442344244425442644274428442944304431443244334434443544364437443844394440444144424443444444454446444744484449445044514452445344544455445644574458445944604461446244634464446544664467446844694470447144724473447444754476447744784479448044814482448344844485448644874488448944904491449244934494449544964497449844994500450145024503450445054506450745084509451045114512451345144515451645174518451945204521452245234524452545264527452845294530453145324533453445354536453745384539454045414542454345444545454645474548454945504551455245534554455545564557455845594560456145624563456445654566456745684569457045714572457345744575457645774578457945804581458245834584458545864587458845894590459145924593459445954596459745984599460046014602460346044605460646074608460946104611461246134614461546164617461846194620462146224623462446254626462746284629463046314632463346344635463646374638463946404641464246434644464546464647464846494650465146524653465446554656465746584659466046614662466346644665466646674668466946704671467246734674467546764677467846794680468146824683468446854686468746884689469046914692469346944695469646974698469947004701470247034704470547064707470847094710471147124713471447154716471747184719472047214722472347244725472647274728472947304731473247334734473547364737473847394740474147424743474447454746474747484749475047514752475347544755475647574758475947604761476247634764476547664767476847694770477147724773477447754776477
74778477947804781478247834784478547864787478847894790479147924793479447954796479747984799480048014802480348044805480648074808480948104811481248134814481548164817481848194820482148224823482448254826482748284829483048314832483348344835483648374838483948404841484248434844484548464847484848494850485148524853485448554856485748584859486048614862486348644865486648674868486948704871487248734874487548764877487848794880488148824883488448854886488748884889489048914892489348944895489648974898489949004901490249034904490549064907490849094910491149124913491449154916491749184919492049214922492349244925492649274928492949304931493249334934493549364937493849394940494149424943494449454946494749484949495049514952495349544955495649574958495949604961496249634964496549664967496849694970497149724973497449754976497749784979498049814982498349844985498649874988498949904991499249934994499549964997499849995000500150025003500450055006500750085009501050115012501350145015501650175018501950205021502250235024502550265027502850295030503150325033503450355036503750385039504050415042504350445045504650475048504950505051505250535054505550565057505850595060506150625063506450655066506750685069507050715072507350745075507650775078507950805081508250835084508550865087508850895090509150925093509450955096509750985099510051015102510351045105510651075108510951105111511251135114511551165117511851195120512151225123512451255126512751285129513051315132513351345135513651375138513951405141514251435144514551465147514851495150515151525153515451555156515751585159516051615162516351645165516651675168516951705171517251735174517551765177517851795180518151825183518451855186518751885189519051915192519351945195519651975198519952005201520252035204520552065207520852095210521152125213521452155216521752185219522052215222522352245225522652275228522952305231523252335234523552365237523852395240524152425243524452455246524752485249525052515252525352545255525652575258525952605261526252635264526552665267526852695270527152725273527452755276527
75278527952805281528252835284528552865287528852895290529152925293529452955296529752985299530053015302530353045305530653075308530953105311531253135314531553165317531853195320532153225323532453255326532753285329533053315332533353345335533653375338533953405341534253435344534553465347534853495350535153525353535453555356535753585359536053615362536353645365536653675368536953705371537253735374537553765377537853795380538153825383538453855386538753885389539053915392539353945395539653975398539954005401540254035404540554065407540854095410541154125413541454155416541754185419542054215422542354245425542654275428542954305431543254335434543554365437543854395440544154425443544454455446544754485449545054515452545354545455545654575458545954605461546254635464546554665467546854695470547154725473547454755476547754785479548054815482548354845485548654875488548954905491549254935494549554965497549854995500550155025503550455055506550755085509551055115512551355145515551655175518551955205521552255235524552555265527552855295530553155325533553455355536553755385539554055415542554355445545554655475548554955505551555255535554555555565557555855595560556155625563556455655566556755685569557055715572557355745575557655775578557955805581558255835584558555865587558855895590559155925593559455955596559755985599560056015602560356045605560656075608560956105611561256135614561556165617561856195620562156225623562456255626562756285629563056315632563356345635563656375638563956405641564256435644564556465647564856495650565156525653565456555656565756585659566056615662566356645665566656675668566956705671567256735674567556765677567856795680568156825683568456855686568756885689569056915692569356945695569656975698569957005701570257035704570557065707570857095710571157125713571457155716571757185719572057215722572357245725572657275728572957305731573257335734573557365737573857395740574157425743574457455746574757485749575057515752575357545755575657575758575957605761576257635764576557665767576857695770577157725773577457755776577
75778577957805781578257835784578557865787578857895790579157925793579457955796579757985799580058015802580358045805580658075808580958105811581258135814581558165817581858195820582158225823582458255826582758285829583058315832583358345835583658375838583958405841584258435844584558465847584858495850585158525853585458555856585758585859586058615862586358645865586658675868586958705871587258735874587558765877587858795880588158825883588458855886588758885889589058915892589358945895589658975898589959005901590259035904590559065907590859095910591159125913591459155916591759185919592059215922592359245925592659275928592959305931593259335934593559365937593859395940594159425943594459455946594759485949595059515952595359545955595659575958595959605961596259635964596559665967596859695970597159725973597459755976597759785979598059815982598359845985598659875988598959905991599259935994599559965997599859996000600160026003600460056006600760086009601060116012601360146015601660176018601960206021602260236024602560266027602860296030603160326033603460356036603760386039604060416042604360446045604660476048604960506051605260536054605560566057605860596060606160626063606460656066606760686069607060716072607360746075607660776078607960806081608260836084608560866087608860896090609160926093609460956096609760986099610061016102610361046105610661076108610961106111611261136114611561166117611861196120612161226123612461256126612761286129613061316132613361346135613661376138613961406141614261436144614561466147614861496150615161526153615461556156615761586159616061616162616361646165616661676168616961706171617261736174617561766177617861796180618161826183618461856186618761886189619061916192619361946195619661976198619962006201620262036204620562066207620862096210621162126213621462156216621762186219622062216222622362246225622662276228622962306231623262336234623562366237623862396240624162426243624462456246624762486249625062516252625362546255625662576258625962606261626262636264626562666267626862696270627162726273627462756276627
76278627962806281628262836284628562866287628862896290629162926293629462956296629762986299630063016302630363046305630663076308630963106311631263136314631563166317631863196320632163226323632463256326632763286329633063316332633363346335633663376338633963406341634263436344634563466347634863496350635163526353635463556356635763586359636063616362636363646365636663676368636963706371637263736374637563766377637863796380638163826383638463856386638763886389639063916392639363946395639663976398639964006401640264036404640564066407640864096410641164126413641464156416641764186419642064216422642364246425642664276428642964306431643264336434643564366437643864396440644164426443644464456446644764486449645064516452645364546455645664576458645964606461646264636464646564666467646864696470647164726473647464756476647764786479648064816482648364846485648664876488648964906491649264936494649564966497649864996500650165026503650465056506650765086509651065116512651365146515651665176518651965206521652265236524652565266527652865296530653165326533653465356536653765386539654065416542654365446545654665476548654965506551655265536554655565566557655865596560656165626563656465656566656765686569657065716572657365746575657665776578657965806581658265836584658565866587658865896590659165926593659465956596659765986599660066016602660366046605660666076608660966106611661266136614661566166617661866196620662166226623662466256626662766286629663066316632663366346635663666376638663966406641664266436644664566466647664866496650665166526653665466556656665766586659666066616662666366646665666666676668666966706671667266736674667566766677667866796680668166826683668466856686668766886689669066916692669366946695669666976698669967006701670267036704670567066707670867096710671167126713671467156716671767186719672067216722672367246725672667276728672967306731673267336734673567366737673867396740674167426743674467456746674767486749675067516752675367546755675667576758675967606761676267636764676567666767676867696770677167726773677467756776677
76778677967806781678267836784678567866787678867896790679167926793679467956796679767986799680068016802680368046805680668076808680968106811681268136814681568166817681868196820682168226823682468256826682768286829683068316832683368346835683668376838683968406841684268436844684568466847684868496850685168526853685468556856685768586859686068616862686368646865686668676868686968706871687268736874687568766877687868796880688168826883688468856886688768886889689068916892689368946895689668976898689969006901690269036904690569066907690869096910691169126913691469156916691769186919692069216922692369246925692669276928692969306931693269336934693569366937693869396940694169426943694469456946694769486949695069516952695369546955695669576958695969606961696269636964696569666967696869696970697169726973697469756976697769786979698069816982698369846985698669876988698969906991699269936994699569966997699869997000700170027003700470057006700770087009701070117012701370147015701670177018701970207021702270237024702570267027702870297030703170327033703470357036703770387039704070417042704370447045704670477048704970507051705270537054705570567057705870597060706170627063706470657066706770687069707070717072707370747075707670777078707970807081708270837084708570867087708870897090709170927093709470957096709770987099710071017102710371047105710671077108710971107111711271137114711571167117711871197120712171227123712471257126712771287129713071317132713371347135713671377138713971407141714271437144714571467147714871497150715171527153715471557156715771587159716071617162716371647165716671677168716971707171717271737174717571767177717871797180718171827183718471857186718771887189719071917192719371947195719671977198719972007201720272037204720572067207720872097210721172127213721472157216721772187219722072217222722372247225722672277228722972307231723272337234723572367237723872397240724172427243724472457246724772487249725072517252725372547255725672577258725972607261726272637264726572667267726872697270727172727273727472757276727
77278727972807281728272837284728572867287728872897290729172927293729472957296729772987299730073017302730373047305730673077308730973107311731273137314731573167317731873197320732173227323732473257326732773287329733073317332733373347335733673377338733973407341734273437344734573467347734873497350735173527353735473557356735773587359736073617362736373647365736673677368736973707371737273737374737573767377737873797380738173827383738473857386738773887389739073917392739373947395739673977398739974007401740274037404740574067407740874097410741174127413741474157416741774187419742074217422742374247425742674277428742974307431743274337434743574367437743874397440744174427443744474457446744774487449745074517452745374547455745674577458745974607461746274637464746574667467746874697470747174727473747474757476747774787479748074817482748374847485748674877488748974907491749274937494749574967497749874997500750175027503750475057506750775087509751075117512751375147515751675177518751975207521752275237524752575267527752875297530753175327533753475357536753775387539754075417542754375447545754675477548754975507551755275537554755575567557755875597560756175627563756475657566756775687569757075717572757375747575757675777578757975807581758275837584758575867587758875897590759175927593759475957596759775987599760076017602760376047605760676077608760976107611761276137614761576167617761876197620762176227623762476257626762776287629763076317632763376347635763676377638763976407641764276437644764576467647764876497650765176527653765476557656765776587659766076617662766376647665766676677668766976707671767276737674767576767677767876797680768176827683768476857686768776887689769076917692769376947695769676977698769977007701770277037704770577067707770877097710771177127713771477157716771777187719772077217722772377247725772677277728772977307731773277337734773577367737773877397740774177427743774477457746774777487749775077517752775377547755775677577758775977607761776277637764776577667767776877697770777177727773777477757776777
77778777977807781778277837784778577867787778877897790779177927793779477957796779777987799780078017802780378047805780678077808780978107811781278137814781578167817781878197820782178227823782478257826782778287829783078317832783378347835783678377838783978407841784278437844784578467847784878497850785178527853785478557856785778587859786078617862786378647865786678677868786978707871787278737874787578767877787878797880788178827883788478857886788778887889789078917892789378947895789678977898789979007901790279037904790579067907790879097910791179127913791479157916791779187919792079217922792379247925792679277928792979307931793279337934793579367937793879397940794179427943794479457946794779487949795079517952795379547955795679577958795979607961796279637964796579667967796879697970797179727973797479757976797779787979798079817982798379847985798679877988798979907991799279937994799579967997799879998000800180028003800480058006800780088009801080118012801380148015801680178018801980208021802280238024802580268027802880298030803180328033803480358036803780388039804080418042804380448045804680478048804980508051805280538054805580568057805880598060806180628063806480658066806780688069807080718072807380748075807680778078807980808081808280838084808580868087808880898090809180928093809480958096809780988099810081018102810381048105810681078108810981108111811281138114811581168117811881198120812181228123812481258126812781288129813081318132813381348135813681378138813981408141814281438144814581468147814881498150815181528153815481558156815781588159816081618162816381648165816681678168816981708171817281738174817581768177817881798180818181828183818481858186818781888189819081918192819381948195819681978198819982008201820282038204820582068207820882098210821182128213821482158216821782188219822082218222822382248225822682278228822982308231823282338234823582368237823882398240824182428243824482458246824782488249825082518252825382548255825682578258825982608261826282638264826582668267826882698270827182728273827482758276827
78278827982808281828282838284828582868287828882898290829182928293829482958296829782988299830083018302830383048305830683078308830983108311831283138314831583168317831883198320832183228323832483258326832783288329833083318332833383348335833683378338833983408341834283438344834583468347834883498350835183528353835483558356835783588359836083618362836383648365836683678368836983708371837283738374837583768377837883798380838183828383838483858386838783888389839083918392839383948395839683978398839984008401840284038404840584068407840884098410841184128413841484158416841784188419842084218422842384248425842684278428842984308431843284338434843584368437843884398440844184428443844484458446844784488449845084518452845384548455845684578458845984608461846284638464846584668467846884698470847184728473847484758476847784788479848084818482848384848485848684878488848984908491849284938494849584968497849884998500850185028503850485058506850785088509851085118512851385148515851685178518851985208521852285238524852585268527852885298530853185328533853485358536853785388539854085418542854385448545854685478548854985508551855285538554855585568557855885598560856185628563856485658566856785688569857085718572857385748575857685778578857985808581858285838584858585868587858885898590859185928593859485958596859785988599860086018602860386048605860686078608860986108611861286138614861586168617861886198620862186228623862486258626862786288629863086318632863386348635863686378638863986408641864286438644864586468647864886498650865186528653865486558656865786588659866086618662866386648665866686678668866986708671867286738674867586768677867886798680868186828683868486858686868786888689869086918692869386948695869686978698869987008701870287038704870587068707870887098710871187128713871487158716871787188719872087218722872387248725872687278728872987308731873287338734873587368737873887398740874187428743874487458746874787488749875087518752875387548755875687578758875987608761876287638764876587668767876887698770877187728773877487758776877
78778877987808781878287838784878587868787878887898790879187928793879487958796879787988799880088018802880388048805880688078808880988108811881288138814881588168817881888198820882188228823882488258826882788288829883088318832883388348835883688378838883988408841884288438844884588468847884888498850885188528853885488558856885788588859886088618862886388648865886688678868886988708871887288738874887588768877887888798880888188828883888488858886888788888889889088918892889388948895889688978898889989008901890289038904890589068907890889098910891189128913891489158916891789188919892089218922892389248925892689278928892989308931893289338934893589368937893889398940894189428943894489458946894789488949895089518952895389548955895689578958895989608961896289638964896589668967896889698970897189728973897489758976897789788979898089818982898389848985898689878988898989908991899289938994899589968997899889999000900190029003900490059006900790089009901090119012901390149015901690179018901990209021902290239024902590269027902890299030903190329033903490359036903790389039904090419042904390449045904690479048904990509051905290539054905590569057905890599060906190629063906490659066906790689069907090719072907390749075907690779078907990809081908290839084908590869087908890899090909190929093909490959096909790989099910091019102910391049105910691079108910991109111911291139114911591169117911891199120912191229123912491259126912791289129913091319132913391349135913691379138913991409141914291439144914591469147914891499150915191529153915491559156915791589159916091619162916391649165916691679168916991709171917291739174917591769177917891799180918191829183918491859186918791889189919091919192919391949195919691979198919992009201920292039204920592069207920892099210921192129213921492159216921792189219922092219222922392249225922692279228922992309231923292339234923592369237923892399240924192429243924492459246924792489249925092519252925392549255925692579258925992609261926292639264926592669267926892699270927192729273927492759276927
79278927992809281928292839284928592869287928892899290929192929293929492959296929792989299930093019302930393049305930693079308930993109311931293139314931593169317931893199320932193229323932493259326932793289329933093319332933393349335933693379338933993409341934293439344934593469347934893499350935193529353935493559356935793589359936093619362936393649365936693679368936993709371937293739374937593769377937893799380938193829383938493859386938793889389939093919392939393949395939693979398939994009401940294039404940594069407940894099410941194129413941494159416941794189419942094219422942394249425942694279428942994309431943294339434943594369437943894399440944194429443944494459446944794489449945094519452945394549455945694579458945994609461946294639464946594669467946894699470947194729473947494759476947794789479948094819482948394849485948694879488948994909491949294939494949594969497949894999500950195029503950495059506950795089509951095119512951395149515951695179518951995209521952295239524952595269527952895299530953195329533953495359536953795389539954095419542954395449545954695479548954995509551955295539554955595569557955895599560956195629563956495659566956795689569957095719572957395749575957695779578957995809581958295839584958595869587958895899590959195929593959495959596959795989599960096019602960396049605960696079608960996109611961296139614961596169617961896199620962196229623962496259626962796289629963096319632963396349635963696379638963996409641964296439644964596469647964896499650965196529653965496559656965796589659966096619662966396649665966696679668966996709671967296739674967596769677967896799680968196829683968496859686968796889689969096919692969396949695969696979698969997009701970297039704970597069707970897099710971197129713971497159716971797189719972097219722972397249725972697279728972997309731973297339734973597369737973897399740974197429743974497459746974797489749975097519752975397549755975697579758975997609761976297639764976597669767976897699770977197729773977497759776977
79778977997809781978297839784978597869787978897899790979197929793979497959796979797989799980098019802980398049805980698079808980998109811981298139814981598169817981898199820982198229823982498259826982798289829983098319832983398349835983698379838983998409841984298439844984598469847984898499850985198529853985498559856985798589859986098619862986398649865986698679868986998709871987298739874987598769877987898799880988198829883988498859886988798889889989098919892989398949895989698979898989999009901990299039904990599069907990899099910991199129913991499159916991799189919992099219922992399249925992699279928992999309931993299339934993599369937993899399940994199429943994499459946994799489949995099519952995399549955995699579958995999609961996299639964996599669967996899699970997199729973997499759976997799789979998099819982998399849985998699879988998999909991999299939994999599969997999899991000010001100021000310004100051000610007100081000910010100111001210013100141001510016100171001810019100201002110022100231002410025100261002710028100291003010031100321003310034100351003610037100381003910040100411004210043100441004510046100471004810049100501005110052100531005410055100561005710058100591006010061100621006310064100651006610067100681006910070100711007210073100741007510076100771007810079100801008110082100831008410085100861008710088100891009010091100921009310094100951009610097100981009910100101011010210103101041010510106101071010810109101101011110112101131011410115101161011710118101191012010121101221012310124101251012610127101281012910130101311013210133101341013510136101371013810139101401014110142101431014410145101461014710148101491015010151101521015310154101551015610157101581015910160101611016210163101641016510166101671016810169101701017110172101731017410175101761017710178101791018010181101821018310184101851018610187101881018910190101911019210193101941019510196101971019810199102001020110202102031020410205102061020710208102091021010211102121021310214102151021610217102181021910220102211
02221022310224102251022610227102281022910230102311023210233102341023510236102371023810239102401024110242102431024410245102461024710248102491025010251102521025310254102551025610257102581025910260102611026210263102641026510266102671026810269102701027110272102731027410275102761027710278102791028010281102821028310284102851028610287102881028910290102911029210293102941029510296102971029810299103001030110302103031030410305103061030710308103091031010311103121031310314103151031610317103181031910320103211032210323103241032510326103271032810329103301033110332103331033410335103361033710338103391034010341103421034310344103451034610347103481034910350103511035210353103541035510356103571035810359103601036110362103631036410365103661036710368103691037010371103721037310374103751037610377103781037910380103811038210383103841038510386103871038810389103901039110392103931039410395103961039710398103991040010401104021040310404104051040610407104081040910410104111041210413104141041510416104171041810419104201042110422104231042410425104261042710428104291043010431104321043310434104351043610437104381043910440104411044210443104441044510446104471044810449104501045110452104531045410455104561045710458104591046010461104621046310464104651046610467104681046910470104711047210473104741047510476104771047810479104801048110482104831048410485104861048710488104891049010491104921049310494104951049610497104981049910500105011050210503105041050510506105071050810509105101051110512105131051410515105161051710518105191052010521105221052310524105251052610527105281052910530105311053210533105341053510536105371053810539105401054110542105431054410545105461054710548105491055010551105521055310554105551055610557105581055910560105611056210563105641056510566105671056810569105701057110572105731057410575105761057710578105791058010581105821058310584105851058610587105881058910590105911059210593105941059510596105971059810599106001060110602106031060410605106061060710608106091061010611106121061310614106151061610617106181061910620106211
06221062310624106251062610627106281062910630106311063210633106341063510636106371063810639106401064110642106431064410645106461064710648106491065010651106521065310654106551065610657106581065910660106611066210663106641066510666106671066810669106701067110672106731067410675106761067710678106791068010681106821068310684106851068610687106881068910690106911069210693106941069510696106971069810699107001070110702107031070410705107061070710708107091071010711107121071310714107151071610717107181071910720107211072210723107241072510726107271072810729107301073110732107331073410735107361073710738107391074010741107421074310744107451074610747107481074910750107511075210753107541075510756107571075810759107601076110762107631076410765107661076710768107691077010771107721077310774107751077610777107781077910780107811078210783107841078510786107871078810789107901079110792107931079410795107961079710798107991080010801108021080310804108051080610807108081080910810108111081210813108141081510816108171081810819108201082110822108231082410825108261082710828108291083010831108321083310834108351083610837108381083910840108411084210843108441084510846108471084810849108501085110852108531085410855108561085710858108591086010861108621086310864108651086610867108681086910870108711087210873108741087510876108771087810879108801088110882108831088410885108861088710888108891089010891108921089310894108951089610897108981089910900109011090210903109041090510906109071090810909109101091110912109131091410915109161091710918109191092010921109221092310924109251092610927109281092910930109311093210933109341093510936109371093810939109401094110942109431094410945109461094710948109491095010951109521095310954109551095610957109581095910960109611096210963109641096510966109671096810969109701097110972109731097410975109761097710978109791098010981109821098310984109851098610987109881098910990109911099210993109941099510996109971099810999110001100111002110031100411005110061100711008110091101011011110121101311014110151101611017110181101911020110211
10221102311024110251102611027110281102911030110311103211033110341103511036110371103811039110401104111042110431104411045110461104711048110491105011051110521105311054110551105611057110581105911060110611106211063110641106511066110671106811069110701107111072110731107411075110761107711078110791108011081110821108311084110851108611087110881108911090110911109211093110941109511096110971109811099111001110111102111031110411105111061110711108111091111011111111121111311114111151111611117111181111911120111211112211123111241112511126111271112811129111301113111132111331113411135111361113711138111391114011141111421114311144111451114611147111481114911150111511115211153111541115511156111571115811159111601116111162111631116411165111661116711168111691117011171111721117311174111751117611177111781117911180111811118211183111841118511186111871118811189111901119111192111931119411195111961119711198111991120011201112021120311204112051120611207112081120911210112111121211213112141121511216112171121811219112201122111222112231122411225112261122711228112291123011231112321123311234112351123611237112381123911240112411124211243112441124511246112471124811249112501125111252112531125411255112561125711258112591126011261112621126311264112651126611267112681126911270112711127211273112741127511276112771127811279112801128111282112831128411285112861128711288112891129011291112921129311294112951129611297112981129911300113011130211303113041130511306113071130811309113101131111312113131131411315113161131711318113191132011321113221132311324113251132611327113281132911330113311133211333113341133511336113371133811339113401134111342113431134411345113461134711348113491135011351113521135311354113551135611357113581135911360113611136211363113641136511366113671136811369113701137111372113731137411375113761137711378113791138011381113821138311384113851138611387113881138911390113911139211393113941139511396113971139811399114001140111402114031140411405114061140711408114091141011411114121141311414114151141611417114181141911420114211
14221142311424114251142611427114281142911430114311143211433114341143511436114371143811439114401144111442114431144411445114461144711448114491145011451114521145311454114551145611457114581145911460114611146211463114641146511466114671146811469114701147111472114731147411475114761147711478114791148011481114821148311484114851148611487114881148911490114911149211493114941149511496114971149811499115001150111502115031150411505115061150711508115091151011511115121151311514115151151611517115181151911520115211152211523115241152511526115271152811529115301153111532115331153411535115361153711538115391154011541115421154311544115451154611547115481154911550115511155211553115541155511556115571155811559115601156111562115631156411565115661156711568115691157011571115721157311574115751157611577115781157911580115811158211583115841158511586115871158811589115901159111592115931159411595115961159711598115991160011601116021160311604116051160611607116081160911610116111161211613116141161511616116171161811619116201162111622116231162411625116261162711628116291163011631116321163311634116351163611637116381163911640116411164211643116441164511646116471164811649116501165111652116531165411655116561165711658116591166011661116621166311664116651166611667116681166911670116711167211673116741167511676116771167811679116801168111682116831168411685116861168711688116891169011691116921169311694116951169611697116981169911700117011170211703117041170511706117071170811709117101171111712117131171411715117161171711718117191172011721117221172311724117251172611727117281172911730117311173211733117341173511736117371173811739117401174111742117431174411745117461174711748117491175011751117521175311754117551175611757117581175911760117611176211763117641176511766117671176811769117701177111772117731177411775117761177711778117791178011781117821178311784117851178611787117881178911790117911179211793117941179511796117971179811799118001180111802118031180411805118061180711808118091181011811118121181311814118151181611817118181181911820118211
18221182311824118251182611827118281182911830118311183211833118341183511836118371183811839118401184111842118431184411845118461184711848118491185011851118521185311854118551185611857118581185911860118611186211863118641186511866118671186811869118701187111872118731187411875118761187711878118791188011881118821188311884118851188611887118881188911890118911189211893118941189511896118971189811899119001190111902119031190411905119061190711908119091191011911119121191311914119151191611917119181191911920119211192211923119241192511926119271192811929119301193111932119331193411935119361193711938119391194011941119421194311944119451194611947119481194911950119511195211953119541195511956119571195811959119601196111962119631196411965119661196711968119691197011971119721197311974119751197611977119781197911980119811198211983119841198511986119871198811989119901199111992119931199411995119961199711998119991200012001120021200312004120051200612007120081200912010120111201212013120141201512016120171201812019120201202112022120231202412025120261202712028120291203012031120321203312034120351203612037120381203912040120411204212043120441204512046120471204812049120501205112052120531205412055120561205712058120591206012061120621206312064120651206612067120681206912070120711207212073120741207512076120771207812079120801208112082120831208412085120861208712088120891209012091120921209312094120951209612097120981209912100121011210212103121041210512106121071210812109121101211112112121131211412115121161211712118121191212012121121221212312124121251212612127121281212912130121311213212133121341213512136121371213812139121401214112142121431214412145121461214712148121491215012151121521215312154121551215612157121581215912160121611216212163121641216512166121671216812169121701217112172121731217412175121761217712178121791218012181121821218312184121851218612187121881218912190121911219212193121941219512196121971219812199122001220112202122031220412205122061220712208122091221012211122121221312214122151221612217122181221912220122211
22221222312224122251222612227122281222912230122311223212233122341223512236122371223812239122401224112242122431224412245122461224712248122491225012251122521225312254122551225612257122581225912260122611226212263122641226512266122671226812269122701227112272122731227412275122761227712278122791228012281122821228312284122851228612287122881228912290122911229212293122941229512296122971229812299123001230112302123031230412305123061230712308123091231012311123121231312314123151231612317123181231912320123211232212323123241232512326123271232812329123301233112332123331233412335123361233712338123391234012341123421234312344123451234612347123481234912350123511235212353123541235512356123571235812359123601236112362123631236412365123661236712368123691237012371123721237312374123751237612377123781237912380123811238212383123841238512386123871238812389123901239112392123931239412395123961239712398123991240012401124021240312404124051240612407124081240912410124111241212413124141241512416124171241812419124201242112422124231242412425124261242712428124291243012431124321243312434124351243612437124381243912440124411244212443124441244512446124471244812449124501245112452124531245412455124561245712458124591246012461124621246312464124651246612467124681246912470124711247212473124741247512476124771247812479124801248112482124831248412485124861248712488124891249012491124921249312494124951249612497124981249912500125011250212503125041250512506125071250812509125101251112512125131251412515125161251712518125191252012521125221252312524125251252612527125281252912530125311253212533125341253512536125371253812539125401254112542125431254412545125461254712548125491255012551125521255312554125551255612557125581255912560125611256212563125641256512566125671256812569125701257112572125731257412575125761257712578125791258012581125821258312584125851258612587125881258912590125911259212593125941259512596125971259812599126001260112602126031260412605126061260712608126091261012611126121261312614126151261612617126181261912620126211
26221262312624126251262612627126281262912630126311263212633126341263512636126371263812639126401264112642126431264412645126461264712648126491265012651126521265312654126551265612657126581265912660126611266212663126641266512666126671266812669126701267112672126731267412675126761267712678126791268012681126821268312684126851268612687126881268912690126911269212693126941269512696126971269812699127001270112702127031270412705127061270712708127091271012711127121271312714127151271612717127181271912720127211272212723127241272512726127271272812729127301273112732127331273412735127361273712738127391274012741127421274312744127451274612747127481274912750127511275212753127541275512756127571275812759127601276112762127631276412765127661276712768127691277012771127721277312774127751277612777127781277912780127811278212783127841278512786127871278812789127901279112792127931279412795127961279712798127991280012801128021280312804128051280612807128081280912810128111281212813128141281512816128171281812819128201282112822128231282412825128261282712828128291283012831128321283312834128351283612837128381283912840128411284212843128441284512846128471284812849128501285112852128531285412855128561285712858128591286012861128621286312864128651286612867128681286912870128711287212873128741287512876128771287812879128801288112882128831288412885128861288712888128891289012891128921289312894128951289612897128981289912900129011290212903129041290512906129071290812909129101291112912129131291412915129161291712918129191292012921129221292312924129251292612927129281292912930129311293212933129341293512936129371293812939129401294112942129431294412945129461294712948129491295012951129521295312954129551295612957129581295912960129611296212963129641296512966129671296812969129701297112972129731297412975129761297712978129791298012981129821298312984129851298612987129881298912990129911299212993129941299512996129971299812999130001300113002130031300413005130061300713008130091301013011130121301313014130151301613017130181301913020130211
30221302313024130251302613027130281302913030130311303213033130341303513036130371303813039130401304113042130431304413045130461304713048130491305013051130521305313054130551305613057130581305913060130611306213063130641306513066130671306813069130701307113072130731307413075130761307713078130791308013081130821308313084130851308613087130881308913090130911309213093130941309513096130971309813099131001310113102131031310413105131061310713108131091311013111131121311313114131151311613117131181311913120131211312213123131241312513126131271312813129131301313113132131331313413135131361313713138131391314013141131421314313144131451314613147131481314913150131511315213153131541315513156131571315813159131601316113162131631316413165131661316713168131691317013171131721317313174131751317613177131781317913180131811318213183131841318513186131871318813189131901319113192131931319413195131961319713198131991320013201132021320313204132051320613207132081320913210132111321213213132141321513216132171321813219132201322113222132231322413225132261322713228132291323013231132321323313234132351323613237132381323913240132411324213243132441324513246132471324813249132501325113252132531325413255132561325713258132591326013261132621326313264132651326613267132681326913270132711327213273132741327513276132771327813279132801328113282132831328413285132861328713288132891329013291132921329313294132951329613297132981329913300133011330213303133041330513306133071330813309133101331113312133131331413315133161331713318133191332013321133221332313324133251332613327133281332913330133311333213333133341333513336133371333813339133401334113342133431334413345133461334713348133491335013351133521335313354133551335613357133581335913360133611336213363133641336513366133671336813369133701337113372133731337413375133761337713378133791338013381133821338313384133851338613387133881338913390133911339213393133941339513396133971339813399134001340113402134031340413405134061340713408134091341013411134121341313414134151341613417134181341913420134211
34221342313424134251342613427134281342913430134311343213433134341343513436134371343813439134401344113442134431344413445134461344713448134491345013451134521345313454134551345613457134581345913460134611346213463134641346513466134671346813469134701347113472134731347413475134761347713478134791348013481134821348313484134851348613487134881348913490134911349213493134941349513496134971349813499135001350113502135031350413505135061350713508135091351013511135121351313514135151351613517135181351913520135211352213523135241352513526135271352813529135301353113532135331353413535135361353713538135391354013541135421354313544135451354613547135481354913550135511355213553135541355513556135571355813559135601356113562135631356413565135661356713568135691357013571135721357313574135751357613577135781357913580135811358213583135841358513586135871358813589135901359113592135931359413595135961359713598135991360013601136021360313604136051360613607136081360913610136111361213613136141361513616136171361813619136201362113622136231362413625136261362713628136291363013631136321363313634136351363613637136381363913640136411364213643136441364513646136471364813649136501365113652136531365413655136561365713658136591366013661136621366313664136651366613667136681366913670136711367213673136741367513676136771367813679136801368113682136831368413685136861368713688136891369013691136921369313694136951369613697136981369913700137011370213703137041370513706137071370813709137101371113712137131371413715137161371713718137191372013721137221372313724137251372613727137281372913730137311373213733137341373513736137371373813739137401374113742137431374413745137461374713748137491375013751137521375313754137551375613757137581375913760137611376213763137641376513766137671376813769137701377113772137731377413775137761377713778137791378013781137821378313784137851378613787137881378913790137911379213793137941379513796137971379813799138001380113802138031380413805138061380713808138091381013811138121381313814138151381613817138181381913820138211
38221382313824138251382613827138281382913830138311383213833138341383513836138371383813839138401384113842138431384413845138461384713848138491385013851138521385313854138551385613857138581385913860138611386213863138641386513866138671386813869138701387113872138731387413875138761387713878138791388013881138821388313884138851388613887138881388913890138911389213893138941389513896138971389813899139001390113902139031390413905139061390713908139091391013911139121391313914139151391613917139181391913920139211392213923139241392513926139271392813929139301393113932139331393413935139361393713938139391394013941139421394313944139451394613947139481394913950139511395213953139541395513956139571395813959139601396113962139631396413965139661396713968139691397013971139721397313974139751397613977139781397913980139811398213983139841398513986139871398813989139901399113992139931399413995139961399713998139991400014001140021400314004140051400614007140081400914010140111401214013140141401514016140171401814019140201402114022140231402414025140261402714028140291403014031140321403314034140351403614037140381403914040140411404214043140441404514046140471404814049140501405114052140531405414055140561405714058140591406014061140621406314064140651406614067140681406914070140711407214073140741407514076140771407814079140801408114082140831408414085140861408714088140891409014091140921409314094140951409614097140981409914100141011410214103141041410514106141071410814109141101411114112141131411414115141161411714118141191412014121141221412314124141251412614127141281412914130141311413214133141341413514136141371413814139141401414114142141431414414145141461414714148141491415014151141521415314154141551415614157141581415914160141611416214163141641416514166141671416814169141701417114172141731417414175141761417714178141791418014181141821418314184141851418614187141881418914190141911419214193141941419514196141971419814199142001420114202142031420414205142061420714208142091421014211142121421314214142151421614217142181421914220142211
42221422314224142251422614227142281422914230142311423214233142341423514236142371423814239142401424114242142431424414245142461424714248142491425014251142521425314254142551425614257142581425914260142611426214263142641426514266142671426814269142701427114272142731427414275142761427714278142791428014281142821428314284142851428614287142881428914290142911429214293142941429514296142971429814299143001430114302143031430414305143061430714308143091431014311143121431314314143151431614317143181431914320143211432214323143241432514326143271432814329143301433114332143331433414335143361433714338143391434014341143421434314344143451434614347143481434914350143511435214353143541435514356143571435814359143601436114362143631436414365143661436714368143691437014371143721437314374143751437614377143781437914380143811438214383143841438514386143871438814389143901439114392143931439414395143961439714398143991440014401144021440314404144051440614407144081440914410144111441214413144141441514416144171441814419144201442114422144231442414425144261442714428144291443014431144321443314434144351443614437144381443914440144411444214443144441444514446144471444814449144501445114452144531445414455144561445714458144591446014461144621446314464144651446614467144681446914470144711447214473144741447514476144771447814479144801448114482144831448414485144861448714488144891449014491144921449314494144951449614497144981449914500145011450214503145041450514506145071450814509145101451114512145131451414515145161451714518145191452014521145221452314524145251452614527145281452914530145311453214533145341453514536145371453814539145401454114542145431454414545145461454714548145491455014551145521455314554145551455614557145581455914560145611456214563145641456514566145671456814569145701457114572145731457414575145761457714578145791458014581145821458314584145851458614587145881458914590145911459214593145941459514596145971459814599146001460114602146031460414605146061460714608146091461014611146121461314614146151461614617146181461914620146211
46221462314624146251462614627146281462914630146311463214633146341463514636146371463814639146401464114642146431464414645146461464714648146491465014651146521465314654146551465614657146581465914660146611466214663146641466514666146671466814669146701467114672146731467414675146761467714678146791468014681146821468314684146851468614687146881468914690146911469214693146941469514696146971469814699147001470114702147031470414705147061470714708147091471014711147121471314714147151471614717147181471914720147211472214723147241472514726147271472814729147301473114732147331473414735147361473714738147391474014741147421474314744147451474614747147481474914750147511475214753147541475514756147571475814759147601476114762147631476414765147661476714768147691477014771147721477314774147751477614777147781477914780147811478214783147841478514786147871478814789147901479114792147931479414795147961479714798147991480014801148021480314804148051480614807148081480914810148111481214813148141481514816148171481814819148201482114822148231482414825148261482714828148291483014831148321483314834148351483614837148381483914840148411484214843148441484514846148471484814849148501485114852148531485414855148561485714858148591486014861148621486314864148651486614867148681486914870148711487214873148741487514876148771487814879148801488114882148831488414885148861488714888148891489014891148921489314894148951489614897148981489914900149011490214903149041490514906149071490814909149101491114912149131491414915149161491714918149191492014921149221492314924149251492614927149281492914930149311493214933149341493514936149371493814939149401494114942149431494414945149461494714948149491495014951149521495314954149551495614957149581495914960149611496214963149641496514966149671496814969149701497114972149731497414975149761497714978149791498014981149821498314984149851498614987149881498914990149911499214993149941499514996149971499814999150001500115002150031500415005150061500715008150091501015011150121501315014150151501615017150181501915020150211
50221502315024150251502615027150281502915030150311503215033150341503515036150371503815039150401504115042150431504415045150461504715048150491505015051150521505315054150551505615057150581505915060150611506215063150641506515066150671506815069150701507115072150731507415075150761507715078150791508015081150821508315084150851508615087150881508915090150911509215093150941509515096150971509815099151001510115102151031510415105151061510715108151091511015111151121511315114151151511615117151181511915120151211512215123151241512515126151271512815129151301513115132151331513415135151361513715138151391514015141151421514315144151451514615147151481514915150151511515215153151541515515156151571515815159151601516115162151631516415165151661516715168151691517015171151721517315174151751517615177151781517915180151811518215183151841518515186151871518815189151901519115192151931519415195151961519715198151991520015201152021520315204152051520615207152081520915210152111521215213152141521515216152171521815219152201522115222152231522415225152261522715228152291523015231152321523315234152351523615237152381523915240152411524215243152441524515246152471524815249152501525115252152531525415255152561525715258152591526015261152621526315264152651526615267152681526915270152711527215273152741527515276152771527815279152801528115282152831528415285152861528715288152891529015291152921529315294152951529615297152981529915300153011530215303153041530515306153071530815309153101531115312153131531415315153161531715318153191532015321153221532315324153251532615327153281532915330153311533215333153341533515336153371533815339153401534115342153431534415345153461534715348153491535015351153521535315354153551535615357153581535915360153611536215363153641536515366153671536815369153701537115372153731537415375153761537715378153791538015381153821538315384153851538615387153881538915390153911539215393153941539515396153971539815399154001540115402154031540415405154061540715408154091541015411154121541315414154151541615417154181541915420154211
54221542315424154251542615427154281542915430154311543215433154341543515436154371543815439154401544115442154431544415445154461544715448154491545015451154521545315454154551545615457154581545915460154611546215463154641546515466154671546815469154701547115472154731547415475154761547715478154791548015481154821548315484154851548615487154881548915490154911549215493154941549515496154971549815499155001550115502155031550415505155061550715508155091551015511155121551315514155151551615517155181551915520155211552215523155241552515526155271552815529155301553115532155331553415535155361553715538155391554015541155421554315544155451554615547155481554915550155511555215553155541555515556155571555815559155601556115562155631556415565155661556715568155691557015571155721557315574155751557615577155781557915580155811558215583155841558515586155871558815589155901559115592155931559415595155961559715598155991560015601156021560315604156051560615607156081560915610156111561215613156141561515616156171561815619156201562115622156231562415625156261562715628156291563015631156321563315634156351563615637156381563915640156411564215643156441564515646156471564815649156501565115652156531565415655156561565715658156591566015661156621566315664156651566615667156681566915670156711567215673156741567515676156771567815679156801568115682156831568415685156861568715688156891569015691156921569315694156951569615697156981569915700157011570215703157041570515706157071570815709157101571115712157131571415715157161571715718157191572015721157221572315724157251572615727157281572915730157311573215733157341573515736157371573815739157401574115742157431574415745157461574715748157491575015751157521575315754157551575615757157581575915760157611576215763157641576515766157671576815769157701577115772157731577415775157761577715778157791578015781157821578315784157851578615787157881578915790157911579215793157941579515796157971579815799158001580115802158031580415805158061580715808158091581015811158121581315814158151581615817158181581915820158211
58221582315824158251582615827158281582915830158311583215833158341583515836158371583815839158401584115842158431584415845158461584715848158491585015851158521585315854158551585615857158581585915860158611586215863158641586515866158671586815869158701587115872158731587415875158761587715878158791588015881158821588315884158851588615887158881588915890158911589215893158941589515896158971589815899159001590115902159031590415905159061590715908159091591015911159121591315914159151591615917159181591915920159211592215923159241592515926159271592815929159301593115932159331593415935159361593715938159391594015941159421594315944159451594615947159481594915950159511595215953159541595515956159571595815959159601596115962159631596415965159661596715968159691597015971159721597315974159751597615977159781597915980159811598215983159841598515986159871598815989159901599115992159931599415995159961599715998159991600016001160021600316004160051600616007160081600916010160111601216013160141601516016160171601816019160201602116022160231602416025160261602716028160291603016031160321603316034160351603616037160381603916040160411604216043160441604516046160471604816049160501605116052160531605416055160561605716058160591606016061160621606316064160651606616067160681606916070160711607216073160741607516076160771607816079160801608116082160831608416085160861608716088160891609016091160921609316094160951609616097160981609916100161011610216103161041610516106161071610816109161101611116112161131611416115161161611716118161191612016121161221612316124161251612616127161281612916130161311613216133161341613516136161371613816139161401614116142161431614416145161461614716148161491615016151161521615316154161551615616157161581615916160161611616216163161641616516166161671616816169161701617116172161731617416175161761617716178161791618016181161821618316184161851618616187161881618916190161911619216193161941619516196161971619816199162001620116202162031620416205162061620716208162091621016211162121621316214162151621616217162181621916220162211
62221622316224162251622616227162281622916230162311623216233162341623516236162371623816239162401624116242162431624416245162461624716248162491625016251162521625316254162551625616257162581625916260162611626216263162641626516266162671626816269162701627116272162731627416275162761627716278162791628016281162821628316284162851628616287162881628916290162911629216293162941629516296162971629816299163001630116302163031630416305163061630716308163091631016311163121631316314163151631616317163181631916320163211632216323163241632516326163271632816329163301633116332163331633416335163361633716338163391634016341163421634316344163451634616347163481634916350163511635216353163541635516356163571635816359163601636116362163631636416365163661636716368163691637016371163721637316374163751637616377163781637916380163811638216383163841638516386163871638816389163901639116392163931639416395163961639716398163991640016401164021640316404164051640616407164081640916410164111641216413164141641516416164171641816419164201642116422164231642416425164261642716428164291643016431164321643316434164351643616437164381643916440164411644216443164441644516446164471644816449164501645116452164531645416455164561645716458164591646016461164621646316464164651646616467164681646916470164711647216473164741647516476164771647816479164801648116482164831648416485164861648716488164891649016491164921649316494164951649616497164981649916500165011650216503165041650516506165071650816509165101651116512165131651416515165161651716518165191652016521165221652316524165251652616527165281652916530165311653216533165341653516536165371653816539165401654116542165431654416545165461654716548165491655016551165521655316554165551655616557165581655916560165611656216563165641656516566165671656816569165701657116572165731657416575165761657716578165791658016581165821658316584165851658616587165881658916590165911659216593165941659516596165971659816599166001660116602166031660416605166061660716608166091661016611166121661316614166151661616617166181661916620166211
66221662316624166251662616627166281662916630166311663216633166341663516636166371663816639166401664116642166431664416645166461664716648166491665016651166521665316654166551665616657166581665916660166611666216663166641666516666166671666816669166701667116672166731667416675166761667716678166791668016681166821668316684166851668616687166881668916690166911669216693166941669516696166971669816699167001670116702167031670416705167061670716708167091671016711167121671316714167151671616717167181671916720167211672216723167241672516726167271672816729167301673116732167331673416735167361673716738167391674016741167421674316744167451674616747167481674916750167511675216753167541675516756167571675816759167601676116762167631676416765167661676716768167691677016771167721677316774167751677616777167781677916780167811678216783167841678516786167871678816789167901679116792167931679416795167961679716798167991680016801168021680316804168051680616807168081680916810168111681216813168141681516816168171681816819168201682116822168231682416825168261682716828168291683016831168321683316834168351683616837168381683916840168411684216843168441684516846168471684816849168501685116852168531685416855168561685716858168591686016861168621686316864168651686616867168681686916870168711687216873168741687516876168771687816879168801688116882168831688416885168861688716888168891689016891168921689316894168951689616897168981689916900169011690216903169041690516906169071690816909169101691116912169131691416915169161691716918169191692016921169221692316924169251692616927169281692916930169311693216933169341693516936169371693816939169401694116942169431694416945169461694716948169491695016951169521695316954169551695616957169581695916960169611696216963169641696516966169671696816969169701697116972169731697416975169761697716978169791698016981169821698316984169851698616987169881698916990169911699216993169941699516996169971699816999170001700117002170031700417005170061700717008170091701017011170121701317014170151701617017170181701917020170211
70221702317024170251702617027170281702917030170311703217033170341703517036170371703817039170401704117042170431704417045170461704717048170491705017051170521705317054170551705617057170581705917060170611706217063170641706517066170671706817069170701707117072170731707417075170761707717078170791708017081170821708317084170851708617087170881708917090170911709217093170941709517096170971709817099171001710117102171031710417105171061710717108171091711017111171121711317114171151711617117171181711917120171211712217123171241712517126171271712817129171301713117132171331713417135171361713717138171391714017141171421714317144171451714617147171481714917150171511715217153171541715517156171571715817159171601716117162171631716417165171661716717168171691717017171171721717317174171751717617177171781717917180171811718217183171841718517186171871718817189171901719117192171931719417195171961719717198171991720017201172021720317204172051720617207172081720917210172111721217213172141721517216172171721817219172201722117222172231722417225172261722717228172291723017231172321723317234172351723617237172381723917240172411724217243172441724517246172471724817249172501725117252172531725417255172561725717258172591726017261172621726317264172651726617267172681726917270172711727217273172741727517276172771727817279172801728117282172831728417285172861728717288172891729017291172921729317294172951729617297172981729917300173011730217303173041730517306173071730817309173101731117312173131731417315173161731717318173191732017321173221732317324173251732617327173281732917330173311733217333173341733517336173371733817339173401734117342173431734417345173461734717348173491735017351173521735317354173551735617357173581735917360173611736217363173641736517366173671736817369173701737117372173731737417375173761737717378173791738017381173821738317384173851738617387173881738917390173911739217393173941739517396173971739817399174001740117402174031740417405174061740717408174091741017411174121741317414174151741617417174181741917420174211
74221742317424174251742617427174281742917430174311743217433174341743517436174371743817439174401744117442174431744417445174461744717448174491745017451174521745317454174551745617457174581745917460174611746217463174641746517466174671746817469174701747117472174731747417475174761747717478174791748017481174821748317484174851748617487174881748917490174911749217493174941749517496174971749817499175001750117502175031750417505175061750717508175091751017511175121751317514175151751617517175181751917520175211752217523175241752517526175271752817529175301753117532175331753417535175361753717538175391754017541175421754317544175451754617547175481754917550175511755217553175541755517556175571755817559175601756117562175631756417565175661756717568175691757017571175721757317574175751757617577175781757917580175811758217583175841758517586175871758817589175901759117592175931759417595175961759717598175991760017601176021760317604176051760617607176081760917610176111761217613176141761517616176171761817619176201762117622176231762417625176261762717628176291763017631176321763317634176351763617637176381763917640176411764217643176441764517646176471764817649176501765117652176531765417655176561765717658176591766017661176621766317664176651766617667176681766917670176711767217673176741767517676176771767817679176801768117682176831768417685176861768717688176891769017691176921769317694176951769617697176981769917700177011770217703177041770517706177071770817709177101771117712177131771417715177161771717718177191772017721177221772317724177251772617727177281772917730177311773217733177341773517736177371773817739177401774117742177431774417745177461774717748177491775017751177521775317754177551775617757177581775917760177611776217763177641776517766177671776817769177701777117772177731777417775177761777717778177791778017781177821778317784177851778617787177881778917790177911779217793177941779517796177971779817799178001780117802178031780417805178061780717808178091781017811178121781317814178151781617817178181781917820178211
78221782317824178251782617827178281782917830178311783217833178341783517836178371783817839178401784117842178431784417845178461784717848178491785017851178521785317854178551785617857178581785917860178611786217863178641786517866178671786817869178701787117872178731787417875178761787717878178791788017881178821788317884178851788617887178881788917890178911789217893178941789517896178971789817899179001790117902179031790417905179061790717908179091791017911179121791317914179151791617917179181791917920179211792217923179241792517926179271792817929179301793117932179331793417935179361793717938179391794017941179421794317944179451794617947179481794917950179511795217953179541795517956179571795817959179601796117962179631796417965179661796717968179691797017971179721797317974179751797617977179781797917980179811798217983179841798517986179871798817989179901799117992179931799417995179961799717998179991800018001180021800318004180051800618007180081800918010180111801218013180141801518016180171801818019180201802118022180231802418025180261802718028180291803018031180321803318034180351803618037180381803918040180411804218043180441804518046180471804818049180501805118052180531805418055180561805718058180591806018061180621806318064180651806618067180681806918070180711807218073180741807518076180771807818079180801808118082180831808418085180861808718088180891809018091180921809318094180951809618097180981809918100181011810218103181041810518106181071810818109181101811118112181131811418115181161811718118181191812018121181221812318124181251812618127181281812918130181311813218133181341813518136181371813818139181401814118142181431814418145181461814718148181491815018151181521815318154181551815618157181581815918160181611816218163181641816518166181671816818169181701817118172181731817418175181761817718178181791818018181181821818318184181851818618187181881818918190181911819218193181941819518196181971819818199182001820118202182031820418205182061820718208182091821018211182121821318214182151821618217182181821918220182211
82221822318224182251822618227182281822918230182311823218233182341823518236182371823818239182401824118242182431824418245182461824718248182491825018251182521825318254182551825618257182581825918260182611826218263182641826518266182671826818269182701827118272182731827418275182761827718278182791828018281182821828318284182851828618287182881828918290182911829218293182941829518296182971829818299183001830118302183031830418305183061830718308183091831018311183121831318314183151831618317183181831918320183211832218323183241832518326183271832818329183301833118332183331833418335183361833718338183391834018341183421834318344183451834618347183481834918350183511835218353183541835518356183571835818359183601836118362183631836418365183661836718368183691837018371183721837318374183751837618377183781837918380183811838218383183841838518386183871838818389183901839118392183931839418395183961839718398183991840018401184021840318404184051840618407184081840918410184111841218413184141841518416184171841818419184201842118422184231842418425184261842718428184291843018431184321843318434184351843618437184381843918440184411844218443184441844518446184471844818449184501845118452184531845418455184561845718458184591846018461184621846318464184651846618467184681846918470184711847218473184741847518476184771847818479184801848118482184831848418485184861848718488184891849018491184921849318494184951849618497184981849918500185011850218503185041850518506185071850818509185101851118512185131851418515185161851718518185191852018521185221852318524185251852618527185281852918530185311853218533185341853518536185371853818539185401854118542185431854418545185461854718548185491855018551185521855318554185551855618557185581855918560185611856218563185641856518566185671856818569185701857118572185731857418575185761857718578185791858018581185821858318584185851858618587185881858918590185911859218593185941859518596185971859818599186001860118602186031860418605186061860718608186091861018611186121861318614186151861618617186181861918620186211
86221862318624186251862618627186281862918630186311863218633186341863518636186371863818639186401864118642186431864418645186461864718648186491865018651186521865318654186551865618657186581865918660186611866218663186641866518666186671866818669186701867118672186731867418675186761867718678186791868018681186821868318684186851868618687186881868918690186911869218693186941869518696186971869818699187001870118702187031870418705187061870718708187091871018711187121871318714187151871618717187181871918720187211872218723187241872518726187271872818729187301873118732187331873418735187361873718738187391874018741187421874318744187451874618747187481874918750187511875218753187541875518756187571875818759187601876118762187631876418765187661876718768187691877018771187721877318774187751877618777187781877918780187811878218783187841878518786187871878818789187901879118792187931879418795187961879718798187991880018801188021880318804188051880618807188081880918810188111881218813188141881518816188171881818819188201882118822188231882418825188261882718828188291883018831188321883318834188351883618837188381883918840188411884218843188441884518846188471884818849188501885118852188531885418855188561885718858188591886018861188621886318864188651886618867188681886918870188711887218873188741887518876188771887818879188801888118882188831888418885188861888718888188891889018891188921889318894188951889618897188981889918900189011890218903189041890518906189071890818909189101891118912189131891418915189161891718918189191892018921189221892318924189251892618927189281892918930189311893218933189341893518936189371893818939189401894118942189431894418945189461894718948189491895018951189521895318954189551895618957189581895918960189611896218963189641896518966189671896818969189701897118972189731897418975189761897718978189791898018981189821898318984189851898618987189881898918990189911899218993189941899518996189971899818999190001900119002190031900419005190061900719008190091901019011190121901319014190151901619017190181901919020190211
90221902319024190251902619027190281902919030190311903219033190341903519036190371903819039190401904119042190431904419045190461904719048190491905019051190521905319054190551905619057190581905919060190611906219063190641906519066190671906819069190701907119072190731907419075190761907719078190791908019081190821908319084190851908619087190881908919090190911909219093190941909519096190971909819099191001910119102191031910419105191061910719108191091911019111191121911319114191151911619117191181911919120191211912219123191241912519126191271912819129191301913119132191331913419135191361913719138191391914019141191421914319144191451914619147191481914919150191511915219153191541915519156191571915819159191601916119162191631916419165191661916719168191691917019171191721917319174191751917619177191781917919180191811918219183191841918519186191871918819189191901919119192191931919419195191961919719198191991920019201192021920319204192051920619207192081920919210192111921219213192141921519216192171921819219192201922119222192231922419225192261922719228192291923019231192321923319234192351923619237192381923919240192411924219243192441924519246192471924819249192501925119252192531925419255192561925719258192591926019261192621926319264192651926619267192681926919270192711927219273192741927519276192771927819279192801928119282192831928419285192861928719288192891929019291192921929319294192951929619297192981929919300193011930219303193041930519306193071930819309193101931119312193131931419315193161931719318193191932019321193221932319324193251932619327193281932919330193311933219333193341933519336193371933819339193401934119342193431934419345193461934719348193491935019351193521935319354193551935619357193581935919360193611936219363193641936519366193671936819369193701937119372193731937419375193761937719378193791938019381193821938319384193851938619387193881938919390193911939219393193941939519396193971939819399194001940119402194031940419405194061940719408194091941019411194121941319414194151941619417194181941919420194211
94221942319424194251942619427194281942919430194311943219433194341943519436194371943819439194401944119442194431944419445194461944719448194491945019451194521945319454194551945619457194581945919460194611946219463194641946519466194671946819469194701947119472194731947419475194761947719478194791948019481194821948319484194851948619487194881948919490194911949219493194941949519496194971949819499195001950119502195031950419505195061950719508195091951019511195121951319514195151951619517195181951919520195211952219523195241952519526195271952819529195301953119532195331953419535195361953719538195391954019541195421954319544195451954619547195481954919550195511955219553195541955519556195571955819559195601956119562195631956419565195661956719568195691957019571195721957319574195751957619577195781957919580195811958219583195841958519586195871958819589195901959119592195931959419595195961959719598195991960019601196021960319604196051960619607196081960919610196111961219613196141961519616196171961819619196201962119622196231962419625196261962719628196291963019631196321963319634196351963619637196381963919640196411964219643196441964519646196471964819649196501965119652196531965419655196561965719658196591966019661196621966319664196651966619667196681966919670196711967219673196741967519676196771967819679196801968119682196831968419685196861968719688196891969019691196921969319694196951969619697196981969919700197011970219703197041970519706197071970819709197101971119712197131971419715197161971719718197191972019721197221972319724197251972619727197281972919730197311973219733197341973519736197371973819739197401974119742197431974419745197461974719748197491975019751197521975319754197551975619757197581975919760197611976219763197641976519766197671976819769197701977119772197731977419775197761977719778197791978019781197821978319784197851978619787197881978919790197911979219793197941979519796197971979819799198001980119802198031980419805198061980719808198091981019811198121981319814198151981619817198181981919820198211
98221982319824198251982619827198281982919830198311983219833198341983519836198371983819839198401984119842198431984419845198461984719848198491985019851198521985319854198551985619857198581985919860198611986219863198641986519866198671986819869198701987119872198731987419875198761987719878198791988019881198821988319884198851988619887198881988919890198911989219893198941989519896198971989819899199001990119902199031990419905199061990719908199091991019911199121991319914199151991619917199181991919920199211992219923199241992519926199271992819929199301993119932199331993419935199361993719938199391994019941199421994319944199451994619947199481994919950199511995219953199541995519956199571995819959199601996119962199631996419965199661996719968199691997019971199721997319974199751997619977199781997919980199811998219983199841998519986199871998819989199901999119992199931999419995199961999719998199992000020001200022000320004200052000620007200082000920010200112001220013200142001520016200172001820019200202002120022200232002420025200262002720028200292003020031200322003320034200352003620037200382003920040200412004220043200442004520046200472004820049200502005120052200532005420055200562005720058200592006020061200622006320064200652006620067200682006920070200712007220073200742007520076200772007820079200802008120082200832008420085200862008720088200892009020091200922009320094200952009620097200982009920100201012010220103201042010520106201072010820109201102011120112201132011420115201162011720118201192012020121201222012320124201252012620127201282012920130201312013220133201342013520136201372013820139201402014120142201432014420145201462014720148201492015020151201522015320154201552015620157201582015920160201612016220163201642016520166201672016820169201702017120172201732017420175201762017720178201792018020181201822018320184201852018620187201882018920190201912019220193201942019520196201972019820199202002020120202202032020420205202062020720208202092021020211202122021320214202152021620217202182021920220202212
02222022320224202252022620227202282022920230202312023220233202342023520236202372023820239202402024120242202432024420245202462024720248202492025020251202522025320254202552025620257202582025920260202612026220263202642026520266202672026820269202702027120272202732027420275202762027720278202792028020281202822028320284202852028620287202882028920290202912029220293202942029520296202972029820299203002030120302203032030420305203062030720308203092031020311203122031320314203152031620317203182031920320203212032220323203242032520326203272032820329203302033120332203332033420335203362033720338203392034020341203422034320344203452034620347203482034920350203512035220353203542035520356203572035820359203602036120362203632036420365203662036720368203692037020371203722037320374203752037620377203782037920380203812038220383203842038520386203872038820389203902039120392203932039420395203962039720398203992040020401204022040320404204052040620407204082040920410204112041220413204142041520416204172041820419204202042120422204232042420425204262042720428204292043020431204322043320434204352043620437204382043920440204412044220443204442044520446204472044820449204502045120452204532045420455204562045720458204592046020461204622046320464204652046620467204682046920470204712047220473204742047520476204772047820479204802048120482204832048420485204862048720488204892049020491204922049320494204952049620497204982049920500205012050220503205042050520506205072050820509205102051120512205132051420515205162051720518205192052020521205222052320524205252052620527205282052920530205312053220533205342053520536205372053820539205402054120542205432054420545205462054720548205492055020551205522055320554205552055620557205582055920560205612056220563205642056520566205672056820569205702057120572205732057420575205762057720578205792058020581205822058320584205852058620587205882058920590205912059220593205942059520596205972059820599206002060120602206032060420605206062060720608206092061020611206122061320614206152061620617206182061920620206212
06222062320624206252062620627206282062920630206312063220633206342063520636206372063820639206402064120642206432064420645206462064720648206492065020651206522065320654206552065620657206582065920660206612066220663206642066520666206672066820669206702067120672206732067420675206762067720678206792068020681206822068320684206852068620687206882068920690206912069220693206942069520696206972069820699207002070120702207032070420705207062070720708207092071020711207122071320714207152071620717207182071920720207212072220723207242072520726207272072820729207302073120732207332073420735207362073720738207392074020741207422074320744207452074620747207482074920750207512075220753207542075520756207572075820759207602076120762207632076420765207662076720768207692077020771207722077320774207752077620777207782077920780207812078220783207842078520786207872078820789207902079120792207932079420795207962079720798207992080020801208022080320804208052080620807208082080920810208112081220813208142081520816208172081820819208202082120822208232082420825208262082720828208292083020831208322083320834208352083620837208382083920840208412084220843208442084520846208472084820849208502085120852208532085420855208562085720858208592086020861208622086320864208652086620867208682086920870208712087220873208742087520876208772087820879208802088120882208832088420885208862088720888208892089020891208922089320894208952089620897208982089920900209012090220903209042090520906209072090820909209102091120912209132091420915209162091720918209192092020921209222092320924209252092620927209282092920930209312093220933209342093520936209372093820939209402094120942209432094420945209462094720948209492095020951209522095320954209552095620957209582095920960209612096220963209642096520966209672096820969209702097120972209732097420975209762097720978209792098020981209822098320984209852098620987209882098920990209912099220993209942099520996209972099820999210002100121002210032100421005210062100721008210092101021011210122101321014210152101621017210182101921020210212
10222102321024210252102621027210282102921030210312103221033210342103521036210372103821039210402104121042210432104421045210462104721048210492105021051210522105321054210552105621057210582105921060210612106221063210642106521066210672106821069210702107121072210732107421075210762107721078210792108021081210822108321084210852108621087210882108921090210912109221093210942109521096210972109821099211002110121102211032110421105211062110721108211092111021111211122111321114211152111621117211182111921120211212112221123211242112521126211272112821129211302113121132211332113421135211362113721138211392114021141211422114321144211452114621147211482114921150211512115221153211542115521156211572115821159211602116121162211632116421165211662116721168211692117021171211722117321174211752117621177211782117921180211812118221183211842118521186211872118821189211902119121192211932119421195211962119721198211992120021201212022120321204212052120621207212082120921210212112121221213212142121521216212172121821219212202122121222212232122421225212262122721228212292123021231212322123321234212352123621237212382123921240212412124221243212442124521246212472124821249212502125121252212532125421255212562125721258212592126021261212622126321264212652126621267212682126921270212712127221273212742127521276212772127821279212802128121282212832128421285212862128721288212892129021291212922129321294212952129621297212982129921300213012130221303213042130521306213072130821309213102131121312213132131421315213162131721318213192132021321213222132321324213252132621327213282132921330213312133221333213342133521336213372133821339213402134121342213432134421345213462134721348213492135021351213522135321354213552135621357213582135921360213612136221363213642136521366213672136821369213702137121372213732137421375213762137721378213792138021381213822138321384213852138621387213882138921390213912139221393213942139521396213972139821399214002140121402214032140421405214062140721408214092141021411214122141321414214152141621417214182141921420214212
14222142321424214252142621427214282142921430214312143221433214342143521436214372143821439214402144121442214432144421445214462144721448214492145021451214522145321454214552145621457214582145921460214612146221463214642146521466214672146821469214702147121472214732147421475214762147721478214792148021481214822148321484214852148621487214882148921490214912149221493214942149521496214972149821499215002150121502215032150421505215062150721508215092151021511215122151321514215152151621517215182151921520215212152221523215242152521526215272152821529215302153121532215332153421535215362153721538215392154021541215422154321544215452154621547215482154921550215512155221553215542155521556215572155821559215602156121562215632156421565215662156721568215692157021571215722157321574215752157621577215782157921580215812158221583215842158521586215872158821589215902159121592215932159421595215962159721598215992160021601216022160321604216052160621607216082160921610216112161221613216142161521616216172161821619216202162121622216232162421625216262162721628216292163021631216322163321634216352163621637216382163921640216412164221643216442164521646216472164821649216502165121652216532165421655216562165721658216592166021661216622166321664216652166621667216682166921670216712167221673216742167521676216772167821679216802168121682216832168421685216862168721688216892169021691216922169321694216952169621697216982169921700217012170221703217042170521706217072170821709217102171121712217132171421715217162171721718217192172021721217222172321724217252172621727217282172921730217312173221733217342173521736217372173821739217402174121742217432174421745217462174721748217492175021751217522175321754217552175621757217582175921760217612176221763217642176521766217672176821769217702177121772217732177421775217762177721778217792178021781217822178321784217852178621787217882178921790217912179221793217942179521796217972179821799218002180121802218032180421805218062180721808218092181021811218122181321814218152181621817218182181921820218212
18222182321824218252182621827218282182921830218312183221833218342183521836218372183821839218402184121842218432184421845218462184721848218492185021851218522185321854218552185621857218582185921860218612186221863218642186521866218672186821869218702187121872218732187421875218762187721878218792188021881218822188321884218852188621887218882188921890218912189221893218942189521896218972189821899219002190121902219032190421905219062190721908219092191021911219122191321914219152191621917219182191921920219212192221923219242192521926219272192821929219302193121932219332193421935219362193721938219392194021941219422194321944219452194621947219482194921950219512195221953219542195521956219572195821959219602196121962219632196421965219662196721968219692197021971219722197321974219752197621977219782197921980219812198221983219842198521986219872198821989219902199121992219932199421995219962199721998219992200022001220022200322004220052200622007220082200922010220112201222013220142201522016220172201822019220202202122022220232202422025220262202722028220292203022031220322203322034220352203622037220382203922040220412204222043220442204522046220472204822049220502205122052220532205422055220562205722058220592206022061220622206322064220652206622067220682206922070220712207222073220742207522076220772207822079220802208122082220832208422085220862208722088220892209022091220922209322094220952209622097220982209922100221012210222103221042210522106221072210822109221102211122112221132211422115221162211722118221192212022121221222212322124221252212622127221282212922130221312213222133221342213522136221372213822139221402214122142221432214422145221462214722148221492215022151221522215322154221552215622157221582215922160221612216222163221642216522166221672216822169221702217122172221732217422175221762217722178221792218022181221822218322184221852218622187221882218922190221912219222193221942219522196221972219822199222002220122202222032220422205222062220722208222092221022211222122221322214222152221622217222182221922220222212
22222222322224222252222622227222282222922230222312223222233222342223522236222372223822239222402224122242222432224422245222462224722248222492225022251222522225322254222552225622257222582225922260222612226222263222642226522266222672226822269222702227122272222732227422275222762227722278222792228022281222822228322284222852228622287222882228922290222912229222293222942229522296222972229822299223002230122302223032230422305223062230722308223092231022311223122231322314223152231622317223182231922320223212232222323223242232522326223272232822329223302233122332223332233422335223362233722338223392234022341223422234322344223452234622347223482234922350223512235222353223542235522356223572235822359223602236122362223632236422365223662236722368223692237022371223722237322374223752237622377223782237922380223812238222383223842238522386223872238822389223902239122392223932239422395223962239722398223992240022401224022240322404224052240622407224082240922410224112241222413224142241522416224172241822419224202242122422224232242422425224262242722428224292243022431224322243322434224352243622437224382243922440224412244222443224442244522446224472244822449224502245122452224532245422455224562245722458224592246022461224622246322464224652246622467224682246922470224712247222473224742247522476224772247822479224802248122482224832248422485224862248722488224892249022491224922249322494224952249622497224982249922500225012250222503225042250522506225072250822509225102251122512225132251422515225162251722518225192252022521225222252322524225252252622527225282252922530225312253222533225342253522536225372253822539225402254122542225432254422545225462254722548225492255022551225522255322554225552255622557225582255922560225612256222563225642256522566225672256822569225702257122572225732257422575225762257722578225792258022581225822258322584225852258622587225882258922590225912259222593225942259522596225972259822599226002260122602226032260422605226062260722608226092261022611226122261322614226152261622617226182261922620226212
26222262322624226252262622627226282262922630226312263222633226342263522636226372263822639226402264122642226432264422645226462264722648226492265022651226522265322654226552265622657226582265922660226612266222663226642266522666226672266822669226702267122672226732267422675226762267722678226792268022681226822268322684226852268622687226882268922690226912269222693226942269522696226972269822699227002270122702227032270422705227062270722708227092271022711227122271322714227152271622717227182271922720227212272222723227242272522726227272272822729227302273122732227332273422735227362273722738227392274022741227422274322744227452274622747227482274922750227512275222753227542275522756227572275822759227602276122762227632276422765227662276722768227692277022771227722277322774227752277622777227782277922780227812278222783227842278522786227872278822789227902279122792227932279422795227962279722798227992280022801228022280322804228052280622807228082280922810228112281222813228142281522816228172281822819228202282122822228232282422825228262282722828228292283022831228322283322834228352283622837228382283922840228412284222843228442284522846228472284822849228502285122852228532285422855228562285722858228592286022861228622286322864228652286622867228682286922870228712287222873228742287522876228772287822879228802288122882228832288422885228862288722888228892289022891228922289322894228952289622897228982289922900229012290222903229042290522906229072290822909229102291122912229132291422915229162291722918229192292022921229222292322924229252292622927229282292922930229312293222933229342293522936229372293822939229402294122942229432294422945229462294722948229492295022951229522295322954229552295622957229582295922960229612296222963229642296522966229672296822969229702297122972229732297422975229762297722978229792298022981229822298322984229852298622987229882298922990229912299222993229942299522996229972299822999230002300123002230032300423005230062300723008230092301023011230122301323014230152301623017230182301923020230212
30222302323024230252302623027230282302923030230312303223033230342303523036230372303823039230402304123042230432304423045230462304723048230492305023051230522305323054230552305623057230582305923060230612306223063230642306523066230672306823069230702307123072230732307423075230762307723078230792308023081230822308323084230852308623087230882308923090230912309223093230942309523096230972309823099231002310123102231032310423105231062310723108231092311023111231122311323114231152311623117231182311923120231212312223123231242312523126231272312823129231302313123132231332313423135231362313723138231392314023141231422314323144231452314623147231482314923150231512315223153231542315523156231572315823159231602316123162231632316423165231662316723168231692317023171231722317323174231752317623177231782317923180231812318223183231842318523186231872318823189231902319123192231932319423195231962319723198231992320023201232022320323204232052320623207232082320923210232112321223213232142321523216232172321823219232202322123222232232322423225232262322723228232292323023231232322323323234232352323623237232382323923240232412324223243232442324523246232472324823249232502325123252232532325423255232562325723258232592326023261232622326323264232652326623267232682326923270232712327223273232742327523276232772327823279232802328123282232832328423285232862328723288232892329023291232922329323294232952329623297232982329923300233012330223303233042330523306233072330823309233102331123312233132331423315233162331723318233192332023321233222332323324233252332623327233282332923330233312333223333233342333523336233372333823339233402334123342233432334423345233462334723348233492335023351233522335323354233552335623357233582335923360233612336223363233642336523366233672336823369233702337123372233732337423375233762337723378233792338023381233822338323384233852338623387233882338923390233912339223393233942339523396233972339823399234002340123402234032340423405234062340723408234092341023411234122341323414234152341623417234182341923420234212
34222342323424234252342623427234282342923430234312343223433234342343523436234372343823439234402344123442234432344423445234462344723448234492345023451234522345323454234552345623457234582345923460234612346223463234642346523466234672346823469234702347123472234732347423475234762347723478234792348023481234822348323484234852348623487234882348923490234912349223493234942349523496234972349823499235002350123502235032350423505235062350723508235092351023511235122351323514235152351623517235182351923520235212352223523235242352523526235272352823529235302353123532235332353423535235362353723538235392354023541235422354323544235452354623547235482354923550235512355223553235542355523556235572355823559235602356123562235632356423565235662356723568235692357023571235722357323574235752357623577235782357923580235812358223583235842358523586235872358823589235902359123592235932359423595235962359723598235992360023601236022360323604236052360623607236082360923610236112361223613236142361523616236172361823619236202362123622236232362423625236262362723628236292363023631236322363323634236352363623637236382363923640236412364223643236442364523646236472364823649236502365123652236532365423655236562365723658236592366023661236622366323664236652366623667236682366923670236712367223673236742367523676236772367823679236802368123682236832368423685236862368723688236892369023691236922369323694236952369623697236982369923700237012370223703237042370523706237072370823709237102371123712237132371423715237162371723718237192372023721237222372323724237252372623727237282372923730237312373223733237342373523736237372373823739237402374123742237432374423745237462374723748237492375023751237522375323754237552375623757237582375923760237612376223763237642376523766237672376823769237702377123772237732377423775237762377723778237792378023781237822378323784237852378623787237882378923790237912379223793237942379523796237972379823799238002380123802238032380423805238062380723808238092381023811238122381323814238152381623817238182381923820238212
38222382323824238252382623827238282382923830238312383223833238342383523836238372383823839238402384123842238432384423845238462384723848238492385023851238522385323854238552385623857238582385923860238612386223863238642386523866238672386823869238702387123872238732387423875238762387723878238792388023881238822388323884238852388623887238882388923890238912389223893238942389523896238972389823899239002390123902239032390423905239062390723908239092391023911239122391323914239152391623917239182391923920239212392223923239242392523926239272392823929239302393123932239332393423935239362393723938239392394023941239422394323944239452394623947239482394923950239512395223953239542395523956239572395823959239602396123962239632396423965239662396723968239692397023971239722397323974239752397623977239782397923980239812398223983239842398523986239872398823989239902399123992239932399423995239962399723998239992400024001240022400324004240052400624007240082400924010240112401224013240142401524016240172401824019240202402124022240232402424025240262402724028240292403024031240322403324034240352403624037240382403924040240412404224043240442404524046240472404824049240502405124052240532405424055240562405724058240592406024061240622406324064240652406624067240682406924070240712407224073240742407524076240772407824079240802408124082240832408424085240862408724088240892409024091240922409324094240952409624097240982409924100241012410224103241042410524106241072410824109241102411124112241132411424115241162411724118241192412024121241222412324124241252412624127241282412924130241312413224133241342413524136241372413824139241402414124142241432414424145241462414724148241492415024151241522415324154241552415624157241582415924160241612416224163241642416524166241672416824169241702417124172241732417424175241762417724178241792418024181241822418324184241852418624187241882418924190241912419224193241942419524196241972419824199242002420124202242032420424205242062420724208242092421024211242122421324214242152421624217242182421924220242212
42222422324224242252422624227242282422924230242312423224233242342423524236242372423824239242402424124242242432424424245242462424724248242492425024251242522425324254242552425624257242582425924260242612426224263242642426524266242672426824269242702427124272242732427424275242762427724278242792428024281242822428324284242852428624287242882428924290242912429224293242942429524296242972429824299243002430124302243032430424305243062430724308243092431024311243122431324314243152431624317243182431924320243212432224323243242432524326243272432824329243302433124332243332433424335243362433724338243392434024341243422434324344243452434624347243482434924350243512435224353243542435524356243572435824359243602436124362243632436424365243662436724368243692437024371243722437324374243752437624377243782437924380243812438224383243842438524386243872438824389243902439124392243932439424395243962439724398243992440024401244022440324404244052440624407244082440924410244112441224413244142441524416244172441824419244202442124422244232442424425244262442724428244292443024431244322443324434244352443624437244382443924440244412444224443244442444524446244472444824449244502445124452244532445424455244562445724458244592446024461244622446324464244652446624467244682446924470244712447224473244742447524476244772447824479244802448124482244832448424485244862448724488244892449024491244922449324494244952449624497244982449924500245012450224503245042450524506245072450824509245102451124512245132451424515245162451724518245192452024521245222452324524245252452624527245282452924530245312453224533245342453524536245372453824539245402454124542245432454424545245462454724548245492455024551245522455324554245552455624557245582455924560245612456224563245642456524566245672456824569245702457124572245732457424575245762457724578245792458024581245822458324584245852458624587245882458924590245912459224593245942459524596245972459824599246002460124602246032460424605246062460724608246092461024611246122461324614246152461624617246182461924620246212
46222462324624246252462624627246282462924630246312463224633246342463524636246372463824639246402464124642246432464424645246462464724648246492465024651246522465324654246552465624657246582465924660246612466224663246642466524666246672466824669246702467124672246732467424675246762467724678246792468024681246822468324684246852468624687246882468924690246912469224693246942469524696246972469824699247002470124702247032470424705247062470724708247092471024711247122471324714247152471624717247182471924720247212472224723247242472524726247272472824729247302473124732247332473424735247362473724738247392474024741247422474324744247452474624747247482474924750247512475224753247542475524756247572475824759247602476124762247632476424765247662476724768247692477024771247722477324774247752477624777247782477924780247812478224783247842478524786247872478824789247902479124792247932479424795247962479724798247992480024801248022480324804248052480624807248082480924810248112481224813248142481524816248172481824819248202482124822248232482424825248262482724828248292483024831248322483324834248352483624837248382483924840248412484224843248442484524846248472484824849248502485124852248532485424855248562485724858248592486024861248622486324864248652486624867248682486924870248712487224873248742487524876248772487824879248802488124882248832488424885248862488724888248892489024891248922489324894248952489624897248982489924900249012490224903249042490524906249072490824909249102491124912249132491424915249162491724918249192492024921249222492324924249252492624927249282492924930249312493224933249342493524936249372493824939249402494124942249432494424945249462494724948249492495024951249522495324954249552495624957249582495924960249612496224963249642496524966249672496824969249702497124972249732497424975249762497724978249792498024981249822498324984249852498624987249882498924990249912499224993249942499524996249972499824999250002500125002250032500425005250062500725008250092501025011250122501325014250152501625017250182501925020250212
50222502325024250252502625027250282502925030250312503225033250342503525036250372503825039250402504125042250432504425045250462504725048250492505025051250522505325054250552505625057250582505925060250612506225063250642506525066250672506825069250702507125072250732507425075250762507725078250792508025081250822508325084250852508625087250882508925090250912509225093250942509525096250972509825099251002510125102251032510425105251062510725108251092511025111251122511325114251152511625117251182511925120251212512225123251242512525126251272512825129251302513125132251332513425135251362513725138251392514025141251422514325144251452514625147251482514925150251512515225153251542515525156251572515825159251602516125162251632516425165251662516725168251692517025171251722517325174251752517625177251782517925180251812518225183251842518525186251872518825189251902519125192251932519425195251962519725198251992520025201252022520325204252052520625207252082520925210252112521225213252142521525216252172521825219252202522125222252232522425225252262522725228252292523025231252322523325234252352523625237252382523925240252412524225243252442524525246252472524825249252502525125252252532525425255252562525725258252592526025261252622526325264252652526625267252682526925270252712527225273252742527525276252772527825279252802528125282252832528425285252862528725288252892529025291252922529325294252952529625297252982529925300253012530225303253042530525306253072530825309253102531125312253132531425315253162531725318253192532025321253222532325324253252532625327253282532925330253312533225333253342533525336253372533825339253402534125342253432534425345253462534725348253492535025351253522535325354253552535625357253582535925360253612536225363253642536525366253672536825369253702537125372253732537425375253762537725378253792538025381253822538325384253852538625387253882538925390253912539225393253942539525396253972539825399254002540125402254032540425405254062540725408254092541025411254122541325414254152541625417254182541925420254212
54222542325424254252542625427254282542925430254312543225433254342543525436254372543825439254402544125442254432544425445254462544725448254492545025451254522545325454254552545625457254582545925460254612546225463254642546525466254672546825469254702547125472254732547425475254762547725478254792548025481254822548325484254852548625487254882548925490254912549225493254942549525496254972549825499255002550125502255032550425505255062550725508255092551025511255122551325514255152551625517255182551925520255212552225523255242552525526255272552825529255302553125532255332553425535255362553725538255392554025541255422554325544255452554625547255482554925550255512555225553255542555525556255572555825559255602556125562255632556425565255662556725568255692557025571255722557325574255752557625577255782557925580255812558225583255842558525586255872558825589255902559125592255932559425595255962559725598255992560025601256022560325604256052560625607256082560925610256112561225613256142561525616256172561825619256202562125622256232562425625256262562725628256292563025631256322563325634256352563625637256382563925640256412564225643256442564525646256472564825649256502565125652256532565425655256562565725658256592566025661256622566325664256652566625667256682566925670256712567225673256742567525676256772567825679256802568125682256832568425685256862568725688256892569025691256922569325694256952569625697256982569925700257012570225703257042570525706257072570825709257102571125712257132571425715257162571725718257192572025721257222572325724257252572625727257282572925730257312573225733257342573525736257372573825739257402574125742257432574425745257462574725748257492575025751257522575325754257552575625757257582575925760257612576225763257642576525766257672576825769257702577125772257732577425775257762577725778257792578025781257822578325784257852578625787257882578925790257912579225793257942579525796257972579825799258002580125802258032580425805258062580725808258092581025811258122581325814258152581625817258182581925820258212
5822258232582425825258262582725828258292583025831258322583325834258352583625837258382583925840258412584225843258442584525846258472584825849258502585125852258532585425855258562585725858258592586025861258622586325864258652586625867258682586925870258712587225873258742587525876258772587825879258802588125882258832588425885258862588725888258892589025891258922589325894258952589625897258982589925900259012590225903259042590525906259072590825909259102591125912259132591425915259162591725918259192592025921259222592325924259252592625927259282592925930259312593225933259342593525936259372593825939259402594125942259432594425945259462594725948259492595025951259522595325954259552595625957259582595925960259612596225963259642596525966259672596825969259702597125972259732597425975259762597725978259792598025981259822598325984259852598625987259882598925990259912599225993
  1. diff -Nur gcc-4.4.6.orig/gcc/builtins.c gcc-4.4.6/gcc/builtins.c
  2. --- gcc-4.4.6.orig/gcc/builtins.c 2010-12-07 19:56:56.000000000 +0100
  3. +++ gcc-4.4.6/gcc/builtins.c 2011-10-22 19:23:08.512581300 +0200
  4. @@ -11108,7 +11108,7 @@
  5. do
  6. {
  7. - code = va_arg (ap, enum tree_code);
  8. + code = va_arg (ap, int);
  9. switch (code)
  10. {
  11. case 0:
  12. diff -Nur gcc-4.4.6.orig/gcc/calls.c gcc-4.4.6/gcc/calls.c
  13. --- gcc-4.4.6.orig/gcc/calls.c 2010-09-24 17:07:36.000000000 +0200
  14. +++ gcc-4.4.6/gcc/calls.c 2011-10-22 19:23:08.512581300 +0200
  15. @@ -3447,7 +3447,7 @@
  16. for (; count < nargs; count++)
  17. {
  18. rtx val = va_arg (p, rtx);
  19. - enum machine_mode mode = va_arg (p, enum machine_mode);
  20. + enum machine_mode mode = va_arg (p, int);
  21. /* We cannot convert the arg value to the mode the library wants here;
  22. must do it earlier where we know the signedness of the arg. */
  23. diff -Nur gcc-4.4.6.orig/gcc/config/avr32/avr32-elf.h gcc-4.4.6/gcc/config/avr32/avr32-elf.h
  24. --- gcc-4.4.6.orig/gcc/config/avr32/avr32-elf.h 1970-01-01 01:00:00.000000000 +0100
  25. +++ gcc-4.4.6/gcc/config/avr32/avr32-elf.h 2011-10-22 19:23:08.516581300 +0200
  26. @@ -0,0 +1,91 @@
  27. +/*
  28. + Elf specific definitions.
  29. + Copyright 2003,2004,2005,2006,2007,2008,2009 Atmel Corporation.
  30. +
  31. + This file is part of GCC.
  32. +
  33. + This program is free software; you can redistribute it and/or modify
  34. + it under the terms of the GNU General Public License as published by
  35. + the Free Software Foundation; either version 2 of the License, or
  36. + (at your option) any later version.
  37. +
  38. + This program is distributed in the hope that it will be useful,
  39. + but WITHOUT ANY WARRANTY; without even the implied warranty of
  40. + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  41. + GNU General Public License for more details.
  42. +
  43. + You should have received a copy of the GNU General Public License
  44. + along with this program; if not, write to the Free Software
  45. + Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */
  46. +
  47. +
  48. +/*****************************************************************************
  49. + * Controlling the Compiler Driver, 'gcc'
  50. + *****************************************************************************/
  51. +
  52. +/* Run-time Target Specification. */
  53. +#undef TARGET_VERSION
  54. +#define TARGET_VERSION fputs (" (AVR32 GNU with ELF)", stderr);
  55. +
  56. +/*
  57. +Another C string constant used much like LINK_SPEC. The
  58. +difference between the two is that STARTFILE_SPEC is used at
  59. +the very beginning of the command given to the linker.
  60. +
  61. +If this macro is not defined, a default is provided that loads the
  62. +standard C startup file from the usual place. See gcc.c.
  63. +*/
  64. +#if 0
  65. +#undef STARTFILE_SPEC
  66. +#define STARTFILE_SPEC "crt0%O%s crti%O%s crtbegin%O%s"
  67. +#endif
  68. +#undef STARTFILE_SPEC
  69. +#define STARTFILE_SPEC "%{mflashvault: crtfv.o%s} %{!mflashvault: crt0.o%s} \
  70. + crti.o%s crtbegin.o%s"
  71. +
  72. +#undef LINK_SPEC
  73. +#define LINK_SPEC "%{muse-oscall:--defsym __do_not_use_oscall_coproc__=0} %{mrelax|O*:%{mno-relax|O0|O1: ;:--relax}} %{mpart=uc3a3revd:-mavr32elf_uc3a3256s;:%{mpart=*:-mavr32elf_%*}} %{mcpu=*:-mavr32elf_%*}"
  74. +
  75. +
  76. +/*
  77. +Another C string constant used much like LINK_SPEC. The
  78. +difference between the two is that ENDFILE_SPEC is used at
  79. +the very end of the command given to the linker.
  80. +
  81. +Do not define this macro if it does not need to do anything.
  82. +*/
  83. +#undef ENDFILE_SPEC
  84. +#define ENDFILE_SPEC "crtend%O%s crtn%O%s"
  85. +
  86. +
  87. +/* Target CPU builtins. */
  88. +#define TARGET_CPU_CPP_BUILTINS() \
  89. + do \
  90. + { \
  91. + builtin_define ("__avr32__"); \
  92. + builtin_define ("__AVR32__"); \
  93. + builtin_define ("__AVR32_ELF__"); \
  94. + builtin_define (avr32_part->macro); \
  95. + builtin_define (avr32_arch->macro); \
  96. + if (avr32_arch->uarch_type == UARCH_TYPE_AVR32A) \
  97. + builtin_define ("__AVR32_AVR32A__"); \
  98. + else \
  99. + builtin_define ("__AVR32_AVR32B__"); \
  100. + if (TARGET_UNALIGNED_WORD) \
  101. + builtin_define ("__AVR32_HAS_UNALIGNED_WORD__"); \
  102. + if (TARGET_SIMD) \
  103. + builtin_define ("__AVR32_HAS_SIMD__"); \
  104. + if (TARGET_DSP) \
  105. + builtin_define ("__AVR32_HAS_DSP__"); \
  106. + if (TARGET_RMW) \
  107. + builtin_define ("__AVR32_HAS_RMW__"); \
  108. + if (TARGET_BRANCH_PRED) \
  109. + builtin_define ("__AVR32_HAS_BRANCH_PRED__"); \
  110. + if (TARGET_FAST_FLOAT) \
  111. + builtin_define ("__AVR32_FAST_FLOAT__"); \
  112. + if (TARGET_FLASHVAULT) \
  113. + builtin_define ("__AVR32_FLASHVAULT__"); \
  114. + if (TARGET_NO_MUL_INSNS) \
  115. + builtin_define ("__AVR32_NO_MUL__"); \
  116. + } \
  117. + while (0)
  118. diff -Nur gcc-4.4.6.orig/gcc/config/avr32/avr32-modes.def gcc-4.4.6/gcc/config/avr32/avr32-modes.def
  119. --- gcc-4.4.6.orig/gcc/config/avr32/avr32-modes.def 1970-01-01 01:00:00.000000000 +0100
  120. +++ gcc-4.4.6/gcc/config/avr32/avr32-modes.def 2011-10-22 19:23:08.524581303 +0200
  121. @@ -0,0 +1 @@
  122. +VECTOR_MODES (INT, 4); /* V4QI V2HI */
  123. diff -Nur gcc-4.4.6.orig/gcc/config/avr32/avr32-protos.h gcc-4.4.6/gcc/config/avr32/avr32-protos.h
  124. --- gcc-4.4.6.orig/gcc/config/avr32/avr32-protos.h 1970-01-01 01:00:00.000000000 +0100
  125. +++ gcc-4.4.6/gcc/config/avr32/avr32-protos.h 2011-10-22 19:23:08.524581303 +0200
  126. @@ -0,0 +1,196 @@
  127. +/*
  128. + Prototypes for exported functions defined in avr32.c
  129. + Copyright 2003,2004,2005,2006,2007,2008,2009 Atmel Corporation.
  130. +
  131. + This file is part of GCC.
  132. +
  133. + This program is free software; you can redistribute it and/or modify
  134. + it under the terms of the GNU General Public License as published by
  135. + the Free Software Foundation; either version 2 of the License, or
  136. + (at your option) any later version.
  137. +
  138. + This program is distributed in the hope that it will be useful,
  139. + but WITHOUT ANY WARRANTY; without even the implied warranty of
  140. + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  141. + GNU General Public License for more details.
  142. +
  143. + You should have received a copy of the GNU General Public License
  144. + along with this program; if not, write to the Free Software
  145. + Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */
  146. +
  147. +
  148. +#ifndef AVR32_PROTOS_H
  149. +#define AVR32_PROTOS_H
  150. +
  151. +extern const int swap_reg[];
  152. +
  153. +extern int avr32_valid_macmac_bypass (rtx, rtx);
  154. +extern int avr32_valid_mulmac_bypass (rtx, rtx);
  155. +
  156. +extern int avr32_decode_lcomm_symbol_offset (rtx, int *);
  157. +extern void avr32_encode_lcomm_symbol_offset (tree, char *, int);
  158. +
  159. +extern const char *avr32_strip_name_encoding (const char *);
  160. +
  161. +extern rtx avr32_get_note_reg_equiv (rtx insn);
  162. +
  163. +extern int avr32_use_return_insn (int iscond);
  164. +
  165. +extern void avr32_make_reglist16 (int reglist16_vect, char *reglist16_string);
  166. +
  167. +extern void avr32_make_reglist8 (int reglist8_vect, char *reglist8_string);
  168. +extern void avr32_make_fp_reglist_w (int reglist_mask, char *reglist_string);
  169. +extern void avr32_make_fp_reglist_d (int reglist_mask, char *reglist_string);
  170. +
  171. +extern void avr32_output_return_instruction (int single_ret_inst,
  172. + int iscond, rtx cond,
  173. + rtx r12_imm);
  174. +extern void avr32_expand_prologue (void);
  175. +extern void avr32_set_return_address (rtx source, rtx scratch);
  176. +
  177. +extern int avr32_hard_regno_mode_ok (int regno, enum machine_mode mode);
  178. +extern int avr32_extra_constraint_s (rtx value, const int strict);
  179. +extern int avr32_eh_return_data_regno (const int n);
  180. +extern int avr32_initial_elimination_offset (const int from, const int to);
  181. +extern rtx avr32_function_arg (CUMULATIVE_ARGS * cum, enum machine_mode mode,
  182. + tree type, int named);
  183. +extern void avr32_init_cumulative_args (CUMULATIVE_ARGS * cum, tree fntype,
  184. + rtx libname, tree fndecl);
  185. +extern void avr32_function_arg_advance (CUMULATIVE_ARGS * cum,
  186. + enum machine_mode mode,
  187. + tree type, int named);
  188. +#ifdef ARGS_SIZE_RTX
  189. +/* expr.h defines ARGS_SIZE_RTX and `enum direction'. */
  190. +extern enum direction avr32_function_arg_padding (enum machine_mode mode,
  191. + tree type);
  192. +#endif /* ARGS_SIZE_RTX */
  193. +extern rtx avr32_function_value (tree valtype, tree func, bool outgoing);
  194. +extern rtx avr32_libcall_value (enum machine_mode mode);
  195. +extern int avr32_sched_use_dfa_pipeline_interface (void);
  196. +extern bool avr32_return_in_memory (tree type, tree fntype);
  197. +extern void avr32_regs_to_save (char *operand);
  198. +extern void avr32_target_asm_function_prologue (FILE * file,
  199. + HOST_WIDE_INT size);
  200. +extern void avr32_target_asm_function_epilogue (FILE * file,
  201. + HOST_WIDE_INT size);
  202. +extern void avr32_trampoline_template (FILE * file);
  203. +extern void avr32_initialize_trampoline (rtx addr, rtx fnaddr,
  204. + rtx static_chain);
  205. +extern int avr32_legitimate_address (enum machine_mode mode, rtx x,
  206. + int strict);
  207. +extern int avr32_legitimate_constant_p (rtx x);
  208. +
  209. +extern int avr32_legitimate_pic_operand_p (rtx x);
  210. +
  211. +extern rtx avr32_find_symbol (rtx x);
  212. +extern void avr32_select_section (rtx exp, int reloc, int align);
  213. +extern void avr32_encode_section_info (tree decl, rtx rtl, int first);
  214. +extern void avr32_asm_file_end (FILE * stream);
  215. +extern void avr32_asm_output_ascii (FILE * stream, char *ptr, int len);
  216. +extern void avr32_asm_output_common (FILE * stream, const char *name,
  217. + int size, int rounded);
  218. +extern void avr32_asm_output_label (FILE * stream, const char *name);
  219. +extern void avr32_asm_declare_object_name (FILE * stream, char *name,
  220. + tree decl);
  221. +extern void avr32_asm_globalize_label (FILE * stream, const char *name);
  222. +extern void avr32_asm_weaken_label (FILE * stream, const char *name);
  223. +extern void avr32_asm_output_external (FILE * stream, tree decl,
  224. + const char *name);
  225. +extern void avr32_asm_output_external_libcall (FILE * stream, rtx symref);
  226. +extern void avr32_asm_output_labelref (FILE * stream, const char *name);
  227. +extern void avr32_notice_update_cc (rtx exp, rtx insn);
  228. +extern void avr32_print_operand (FILE * stream, rtx x, int code);
  229. +extern void avr32_print_operand_address (FILE * stream, rtx x);
  230. +
  231. +extern int avr32_symbol (rtx x);
  232. +
  233. +extern void avr32_select_rtx_section (enum machine_mode mode, rtx x,
  234. + unsigned HOST_WIDE_INT align);
  235. +
  236. +extern int avr32_load_multiple_operation (rtx op, enum machine_mode mode);
  237. +extern int avr32_store_multiple_operation (rtx op, enum machine_mode mode);
  238. +
  239. +extern int avr32_const_ok_for_constraint_p (HOST_WIDE_INT value, char c,
  240. + const char *str);
  241. +
  242. +extern bool avr32_cannot_force_const_mem (rtx x);
  243. +
  244. +extern void avr32_init_builtins (void);
  245. +
  246. +extern rtx avr32_expand_builtin (tree exp, rtx target, rtx subtarget,
  247. + enum machine_mode mode, int ignore);
  248. +
  249. +extern bool avr32_must_pass_in_stack (enum machine_mode mode, tree type);
  250. +
  251. +extern bool avr32_strict_argument_naming (CUMULATIVE_ARGS * ca);
  252. +
  253. +extern bool avr32_pass_by_reference (CUMULATIVE_ARGS * cum,
  254. + enum machine_mode mode,
  255. + tree type, bool named);
  256. +
  257. +extern rtx avr32_gen_load_multiple (rtx * regs, int count, rtx from,
  258. + int write_back, int in_struct_p,
  259. + int scalar_p);
  260. +extern rtx avr32_gen_store_multiple (rtx * regs, int count, rtx to,
  261. + int in_struct_p, int scalar_p);
  262. +extern int avr32_gen_movmemsi (rtx * operands);
  263. +
  264. +extern int avr32_rnd_operands (rtx add, rtx shift);
  265. +extern int avr32_adjust_insn_length (rtx insn, int length);
  266. +
  267. +extern int symbol_mentioned_p (rtx x);
  268. +extern int label_mentioned_p (rtx x);
  269. +extern rtx legitimize_pic_address (rtx orig, enum machine_mode mode, rtx reg);
  270. +extern int avr32_address_register_rtx_p (rtx x, int strict_p);
  271. +extern int avr32_legitimate_index_p (enum machine_mode mode, rtx index,
  272. + int strict_p);
  273. +
  274. +extern int avr32_const_double_immediate (rtx value);
  275. +extern void avr32_init_expanders (void);
  276. +extern rtx avr32_return_addr (int count, rtx frame);
  277. +extern bool avr32_got_mentioned_p (rtx addr);
  278. +
  279. +extern void avr32_final_prescan_insn (rtx insn, rtx * opvec, int noperands);
  280. +
  281. +extern int avr32_expand_movcc (enum machine_mode mode, rtx operands[]);
  282. +extern int avr32_expand_addcc (enum machine_mode mode, rtx operands[]);
  283. +#ifdef RTX_CODE
  284. +extern int avr32_expand_scc (RTX_CODE cond, rtx * operands);
  285. +#endif
  286. +
  287. +extern int avr32_store_bypass (rtx insn_out, rtx insn_in);
  288. +extern int avr32_mul_waw_bypass (rtx insn_out, rtx insn_in);
  289. +extern int avr32_valid_load_double_bypass (rtx insn_out, rtx insn_in);
  290. +extern int avr32_valid_load_quad_bypass (rtx insn_out, rtx insn_in);
  291. +extern rtx avr32_output_cmp (rtx cond, enum machine_mode mode,
  292. + rtx op0, rtx op1);
  293. +
  294. +rtx get_next_insn_cond (rtx cur_insn);
  295. +int set_next_insn_cond (rtx cur_insn, rtx cond);
  296. +rtx next_insn_emits_cmp (rtx cur_insn);
  297. +void avr32_override_options (void);
  298. +void avr32_load_pic_register (void);
  299. +#ifdef GCC_BASIC_BLOCK_H
  300. +rtx avr32_ifcvt_modify_insn (ce_if_block_t *ce_info, rtx pattern, rtx insn,
  301. + int *num_true_changes);
  302. +rtx avr32_ifcvt_modify_test (ce_if_block_t *ce_info, rtx test );
  303. +void avr32_ifcvt_modify_cancel ( ce_if_block_t *ce_info, int *num_true_changes);
  304. +#endif
  305. +void avr32_optimization_options (int level, int size);
  306. +int avr32_const_ok_for_move (HOST_WIDE_INT c);
  307. +
  308. +void avr32_split_const_expr (enum machine_mode mode,
  309. + enum machine_mode new_mode,
  310. + rtx expr,
  311. + rtx *split_expr);
  312. +void avr32_get_intval (enum machine_mode mode,
  313. + rtx const_expr,
  314. + HOST_WIDE_INT *val);
  315. +
  316. +int avr32_cond_imm_clobber_splittable (rtx insn,
  317. + rtx operands[]);
  318. +
  319. +bool avr32_flashvault_call(tree decl);
  320. +extern void avr32_emit_swdivsf (rtx, rtx, rtx);
  321. +
  322. +#endif /* AVR32_PROTOS_H */
  323. diff -Nur gcc-4.4.6.orig/gcc/config/avr32/avr32.c gcc-4.4.6/gcc/config/avr32/avr32.c
  324. --- gcc-4.4.6.orig/gcc/config/avr32/avr32.c 1970-01-01 01:00:00.000000000 +0100
  325. +++ gcc-4.4.6/gcc/config/avr32/avr32.c 2011-10-22 19:23:08.516581300 +0200
  326. @@ -0,0 +1,8087 @@
  327. +/*
  328. + Target hooks and helper functions for AVR32.
  329. + Copyright 2003,2004,2005,2006,2007,2008,2009,2010 Atmel Corporation.
  330. +
  331. + This file is part of GCC.
  332. +
  333. + This program is free software; you can redistribute it and/or modify
  334. + it under the terms of the GNU General Public License as published by
  335. + the Free Software Foundation; either version 2 of the License, or
  336. + (at your option) any later version.
  337. +
  338. + This program is distributed in the hope that it will be useful,
  339. + but WITHOUT ANY WARRANTY; without even the implied warranty of
  340. + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  341. + GNU General Public License for more details.
  342. +
  343. + You should have received a copy of the GNU General Public License
  344. + along with this program; if not, write to the Free Software
  345. + Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */
  346. +
  347. +#include "config.h"
  348. +#include "system.h"
  349. +#include "coretypes.h"
  350. +#include "tm.h"
  351. +#include "rtl.h"
  352. +#include "tree.h"
  353. +#include "obstack.h"
  354. +#include "regs.h"
  355. +#include "hard-reg-set.h"
  356. +#include "real.h"
  357. +#include "insn-config.h"
  358. +#include "conditions.h"
  359. +#include "output.h"
  360. +#include "insn-attr.h"
  361. +#include "flags.h"
  362. +#include "reload.h"
  363. +#include "function.h"
  364. +#include "expr.h"
  365. +#include "optabs.h"
  366. +#include "toplev.h"
  367. +#include "recog.h"
  368. +#include "ggc.h"
  369. +#include "except.h"
  370. +#include "c-pragma.h"
  371. +#include "integrate.h"
  372. +#include "tm_p.h"
  373. +#include "langhooks.h"
  374. +#include "hooks.h"
  375. +#include "df.h"
  376. +
  377. +#include "target.h"
  378. +#include "target-def.h"
  379. +
  380. +#include <ctype.h>
  381. +
  382. +
  383. +
  384. +/* Global variables. */
  385. +typedef struct minipool_node Mnode;
  386. +typedef struct minipool_fixup Mfix;
  387. +
  388. +/* Obstack for minipool constant handling. */
  389. +static struct obstack minipool_obstack;
  390. +static char *minipool_startobj;
  391. +static rtx minipool_vector_label;
  392. +
  393. +/* True if we are currently building a constant table. */
  394. +int making_const_table;
  395. +
  396. +tree fndecl_attribute_args = NULL_TREE;
  397. +
  398. +
  399. +/* Function prototypes. */
  400. +static unsigned long avr32_isr_value (tree);
  401. +static unsigned long avr32_compute_func_type (void);
  402. +static tree avr32_handle_isr_attribute (tree *, tree, tree, int, bool *);
  403. +static tree avr32_handle_acall_attribute (tree *, tree, tree, int, bool *);
  404. +static tree avr32_handle_fndecl_attribute (tree * node, tree name, tree args,
  405. + int flags, bool * no_add_attrs);
  406. +static void avr32_reorg (void);
  407. +bool avr32_return_in_msb (tree type);
  408. +bool avr32_vector_mode_supported (enum machine_mode mode);
  409. +static void avr32_init_libfuncs (void);
  410. +static void avr32_file_end (void);
  411. +static void flashvault_decl_list_add (unsigned int vector_num, const char *name);
  412. +
  413. +
  414. +
  415. +static void
  416. +avr32_add_gc_roots (void)
  417. +{
  418. + gcc_obstack_init (&minipool_obstack);
  419. + minipool_startobj = (char *) obstack_alloc (&minipool_obstack, 0);
  420. +}
  421. +
  422. +
  423. +/* List of all known AVR32 parts */
  424. +static const struct part_type_s avr32_part_types[] = {
  425. + /* name, part_type, architecture type, macro */
  426. + {"none", PART_TYPE_AVR32_NONE, ARCH_TYPE_AVR32_AP, "__AVR32__"},
  427. + {"ap7000", PART_TYPE_AVR32_AP7000, ARCH_TYPE_AVR32_AP, "__AVR32_AP7000__"},
  428. + {"ap7001", PART_TYPE_AVR32_AP7001, ARCH_TYPE_AVR32_AP, "__AVR32_AP7001__"},
  429. + {"ap7002", PART_TYPE_AVR32_AP7002, ARCH_TYPE_AVR32_AP, "__AVR32_AP7002__"},
  430. + {"ap7200", PART_TYPE_AVR32_AP7200, ARCH_TYPE_AVR32_AP, "__AVR32_AP7200__"},
  431. + {"uc3a0128", PART_TYPE_AVR32_UC3A0128, ARCH_TYPE_AVR32_UCR2, "__AVR32_UC3A0128__"},
  432. + {"uc3a0256", PART_TYPE_AVR32_UC3A0256, ARCH_TYPE_AVR32_UCR2, "__AVR32_UC3A0256__"},
  433. + {"uc3a0512", PART_TYPE_AVR32_UC3A0512, ARCH_TYPE_AVR32_UCR2, "__AVR32_UC3A0512__"},
  434. + {"uc3a0512es", PART_TYPE_AVR32_UC3A0512ES, ARCH_TYPE_AVR32_UCR1, "__AVR32_UC3A0512ES__"},
  435. + {"uc3a1128", PART_TYPE_AVR32_UC3A1128, ARCH_TYPE_AVR32_UCR2, "__AVR32_UC3A1128__"},
  436. + {"uc3a1256", PART_TYPE_AVR32_UC3A1256, ARCH_TYPE_AVR32_UCR2, "__AVR32_UC3A1256__"},
  437. + {"uc3a1512", PART_TYPE_AVR32_UC3A1512, ARCH_TYPE_AVR32_UCR2, "__AVR32_UC3A1512__"},
  438. + {"uc3a1512es", PART_TYPE_AVR32_UC3A1512ES, ARCH_TYPE_AVR32_UCR1, "__AVR32_UC3A1512ES__"},
  439. + {"uc3a3revd", PART_TYPE_AVR32_UC3A3REVD, ARCH_TYPE_AVR32_UCR2NOMUL, "__AVR32_UC3A3256S__"},
  440. + {"uc3a364", PART_TYPE_AVR32_UC3A364, ARCH_TYPE_AVR32_UCR2, "__AVR32_UC3A364__"},
  441. + {"uc3a364s", PART_TYPE_AVR32_UC3A364S, ARCH_TYPE_AVR32_UCR2, "__AVR32_UC3A364S__"},
  442. + {"uc3a3128", PART_TYPE_AVR32_UC3A3128, ARCH_TYPE_AVR32_UCR2, "__AVR32_UC3A3128__"},
  443. + {"uc3a3128s", PART_TYPE_AVR32_UC3A3128S, ARCH_TYPE_AVR32_UCR2, "__AVR32_UC3A3128S__"},
  444. + {"uc3a3256", PART_TYPE_AVR32_UC3A3256, ARCH_TYPE_AVR32_UCR2, "__AVR32_UC3A3256__"},
  445. + {"uc3a3256s", PART_TYPE_AVR32_UC3A3256S, ARCH_TYPE_AVR32_UCR2, "__AVR32_UC3A3256S__"},
  446. + {"uc3a464", PART_TYPE_AVR32_UC3A464, ARCH_TYPE_AVR32_UCR2, "__AVR32_UC3A464__"},
  447. + {"uc3a464s", PART_TYPE_AVR32_UC3A464S, ARCH_TYPE_AVR32_UCR2, "__AVR32_UC3A464S__"},
  448. + {"uc3a4128", PART_TYPE_AVR32_UC3A4128, ARCH_TYPE_AVR32_UCR2, "__AVR32_UC3A4128__"},
  449. + {"uc3a4128s", PART_TYPE_AVR32_UC3A4128S, ARCH_TYPE_AVR32_UCR2, "__AVR32_UC3A4128S__"},
  450. + {"uc3a4256", PART_TYPE_AVR32_UC3A4256, ARCH_TYPE_AVR32_UCR2, "__AVR32_UC3A4256__"},
  451. + {"uc3a4256s", PART_TYPE_AVR32_UC3A4256S, ARCH_TYPE_AVR32_UCR2, "__AVR32_UC3A4256S__"},
  452. + {"uc3b064", PART_TYPE_AVR32_UC3B064, ARCH_TYPE_AVR32_UCR1, "__AVR32_UC3B064__"},
  453. + {"uc3b0128", PART_TYPE_AVR32_UC3B0128, ARCH_TYPE_AVR32_UCR1, "__AVR32_UC3B0128__"},
  454. + {"uc3b0256", PART_TYPE_AVR32_UC3B0256, ARCH_TYPE_AVR32_UCR1, "__AVR32_UC3B0256__"},
  455. + {"uc3b0256es", PART_TYPE_AVR32_UC3B0256ES, ARCH_TYPE_AVR32_UCR1, "__AVR32_UC3B0256ES__"},
  456. + {"uc3b0512", PART_TYPE_AVR32_UC3B0512, ARCH_TYPE_AVR32_UCR2, "__AVR32_UC3B0512__"},
  457. + {"uc3b0512revc", PART_TYPE_AVR32_UC3B0512REVC, ARCH_TYPE_AVR32_UCR2, "__AVR32_UC3B0512REVC__"},
  458. + {"uc3b164", PART_TYPE_AVR32_UC3B164, ARCH_TYPE_AVR32_UCR1, "__AVR32_UC3B164__"},
  459. + {"uc3b1128", PART_TYPE_AVR32_UC3B1128, ARCH_TYPE_AVR32_UCR1, "__AVR32_UC3B1128__"},
  460. + {"uc3b1256", PART_TYPE_AVR32_UC3B1256, ARCH_TYPE_AVR32_UCR1, "__AVR32_UC3B1256__"},
  461. + {"uc3b1256es", PART_TYPE_AVR32_UC3B1256ES, ARCH_TYPE_AVR32_UCR1, "__AVR32_UC3B1256ES__"},
  462. + {"uc3b1512", PART_TYPE_AVR32_UC3B1512, ARCH_TYPE_AVR32_UCR2, "__AVR32_UC3B1512__"},
  463. + {"uc3b1512revc", PART_TYPE_AVR32_UC3B1512REVC, ARCH_TYPE_AVR32_UCR2, "__AVR32_UC3B1512REVC__"},
  464. + {"uc64d3", PART_TYPE_AVR32_UC64D3, ARCH_TYPE_AVR32_UCR3, "__AVR32_UC64D3__"},
  465. + {"uc128d3", PART_TYPE_AVR32_UC128D3, ARCH_TYPE_AVR32_UCR3, "__AVR32_UC128D3__"},
  466. + {"uc64d4", PART_TYPE_AVR32_UC64D4, ARCH_TYPE_AVR32_UCR3, "__AVR32_UC64D4__"},
  467. + {"uc128d4", PART_TYPE_AVR32_UC128D4, ARCH_TYPE_AVR32_UCR3, "__AVR32_UC128D4__"},
  468. + {"uc3c0512crevc", PART_TYPE_AVR32_UC3C0512CREVC, ARCH_TYPE_AVR32_UCR3, "__AVR32_UC3C0512CREVC__"},
  469. + {"uc3c1512crevc", PART_TYPE_AVR32_UC3C1512CREVC, ARCH_TYPE_AVR32_UCR3, "__AVR32_UC3C1512CREVC__"},
  470. + {"uc3c2512crevc", PART_TYPE_AVR32_UC3C2512CREVC, ARCH_TYPE_AVR32_UCR3, "__AVR32_UC3C2512CREVC__"},
  471. + {"uc3l0256", PART_TYPE_AVR32_UC3L0256, ARCH_TYPE_AVR32_UCR3, "__AVR32_UC3L0256__"},
  472. + {"uc3l0128", PART_TYPE_AVR32_UC3L0128, ARCH_TYPE_AVR32_UCR3, "__AVR32_UC3L0128__"},
  473. + {"uc3l064", PART_TYPE_AVR32_UC3L064, ARCH_TYPE_AVR32_UCR3, "__AVR32_UC3L064__"},
  474. + {"uc3l032", PART_TYPE_AVR32_UC3L032, ARCH_TYPE_AVR32_UCR3, "__AVR32_UC3L032__"},
  475. + {"uc3l016", PART_TYPE_AVR32_UC3L016, ARCH_TYPE_AVR32_UCR3, "__AVR32_UC3L016__"},
  476. + {"uc3l064revb", PART_TYPE_AVR32_UC3L064REVB, ARCH_TYPE_AVR32_UCR3, "__AVR32_UC3L064REVB__"},
  477. + {"uc64l3u", PART_TYPE_AVR32_UC64L3U, ARCH_TYPE_AVR32_UCR3, "__AVR32_UC64L3U__"},
  478. + {"uc128l3u", PART_TYPE_AVR32_UC128L3U, ARCH_TYPE_AVR32_UCR3, "__AVR32_UC128L3U__"},
  479. + {"uc256l3u", PART_TYPE_AVR32_UC256L3U, ARCH_TYPE_AVR32_UCR3, "__AVR32_UC256L3U__"},
  480. + {"uc64l4u", PART_TYPE_AVR32_UC64L4U, ARCH_TYPE_AVR32_UCR3, "__AVR32_UC64L4U__"},
  481. + {"uc128l4u", PART_TYPE_AVR32_UC128L4U, ARCH_TYPE_AVR32_UCR3, "__AVR32_UC128L4U__"},
  482. + {"uc256l4u", PART_TYPE_AVR32_UC256L4U, ARCH_TYPE_AVR32_UCR3, "__AVR32_UC256L4U__"},
  483. + {"uc3c064c", PART_TYPE_AVR32_UC3C064C, ARCH_TYPE_AVR32_UCR3FP, "__AVR32_UC3C064C__"},
  484. + {"uc3c0128c", PART_TYPE_AVR32_UC3C0128C, ARCH_TYPE_AVR32_UCR3FP, "__AVR32_UC3C0128C__"},
  485. + {"uc3c0256c", PART_TYPE_AVR32_UC3C0256C, ARCH_TYPE_AVR32_UCR3FP, "__AVR32_UC3C0256C__"},
  486. + {"uc3c0512c", PART_TYPE_AVR32_UC3C0512C, ARCH_TYPE_AVR32_UCR3FP, "__AVR32_UC3C0512C__"},
  487. + {"uc3c164c", PART_TYPE_AVR32_UC3C164C, ARCH_TYPE_AVR32_UCR3FP, "__AVR32_UC3C164C__"},
  488. + {"uc3c1128c", PART_TYPE_AVR32_UC3C1128C, ARCH_TYPE_AVR32_UCR3FP, "__AVR32_UC3C1128C__"},
  489. + {"uc3c1256c", PART_TYPE_AVR32_UC3C1256C, ARCH_TYPE_AVR32_UCR3FP, "__AVR32_UC3C1256C__"},
  490. + {"uc3c1512c", PART_TYPE_AVR32_UC3C1512C, ARCH_TYPE_AVR32_UCR3FP, "__AVR32_UC3C1512C__"},
  491. + {"uc3c264c", PART_TYPE_AVR32_UC3C264C, ARCH_TYPE_AVR32_UCR3FP, "__AVR32_UC3C264C__"},
  492. + {"uc3c2128c", PART_TYPE_AVR32_UC3C2128C, ARCH_TYPE_AVR32_UCR3FP, "__AVR32_UC3C2128C__"},
  493. + {"uc3c2256c", PART_TYPE_AVR32_UC3C2256C, ARCH_TYPE_AVR32_UCR3FP, "__AVR32_UC3C2256C__"},
  494. + {"uc3c2512c", PART_TYPE_AVR32_UC3C2512C, ARCH_TYPE_AVR32_UCR3FP, "__AVR32_UC3C2512C__"},
  495. + {"mxt768e", PART_TYPE_AVR32_MXT768E, ARCH_TYPE_AVR32_UCR3, "__AVR32_MXT768E__"},
  496. + {NULL, 0, 0, NULL}
  497. +};
  498. +
  499. +/* List of all known AVR32 architectures */
  500. +static const struct arch_type_s avr32_arch_types[] = {
  501. + /* name, architecture type, microarchitecture type, feature flags, macro */
  502. + {"ap", ARCH_TYPE_AVR32_AP, UARCH_TYPE_AVR32B,
  503. + (FLAG_AVR32_HAS_DSP
  504. + | FLAG_AVR32_HAS_SIMD
  505. + | FLAG_AVR32_HAS_UNALIGNED_WORD
  506. + | FLAG_AVR32_HAS_BRANCH_PRED | FLAG_AVR32_HAS_RETURN_STACK
  507. + | FLAG_AVR32_HAS_CACHES),
  508. + "__AVR32_AP__"},
  509. + {"ucr1", ARCH_TYPE_AVR32_UCR1, UARCH_TYPE_AVR32A,
  510. + (FLAG_AVR32_HAS_DSP | FLAG_AVR32_HAS_RMW),
  511. + "__AVR32_UC__=1"},
  512. + {"ucr2", ARCH_TYPE_AVR32_UCR2, UARCH_TYPE_AVR32A,
  513. + (FLAG_AVR32_HAS_DSP | FLAG_AVR32_HAS_RMW
  514. + | FLAG_AVR32_HAS_V2_INSNS),
  515. + "__AVR32_UC__=2"},
  516. + {"ucr2nomul", ARCH_TYPE_AVR32_UCR2NOMUL, UARCH_TYPE_AVR32A,
  517. + (FLAG_AVR32_HAS_DSP | FLAG_AVR32_HAS_RMW
  518. + | FLAG_AVR32_HAS_V2_INSNS | FLAG_AVR32_HAS_NO_MUL_INSNS),
  519. + "__AVR32_UC__=2"},
  520. + {"ucr3", ARCH_TYPE_AVR32_UCR3, UARCH_TYPE_AVR32A,
  521. + (FLAG_AVR32_HAS_DSP | FLAG_AVR32_HAS_RMW
  522. + | FLAG_AVR32_HAS_V2_INSNS),
  523. + "__AVR32_UC__=3"},
  524. + {"ucr3fp", ARCH_TYPE_AVR32_UCR3FP, UARCH_TYPE_AVR32A,
  525. + (FLAG_AVR32_HAS_DSP | FLAG_AVR32_HAS_RMW | FLAG_AVR32_HAS_FPU
  526. + | FLAG_AVR32_HAS_V2_INSNS),
  527. + "__AVR32_UC__=3"},
  528. + {NULL, 0, 0, 0, NULL}
  529. +};
  530. +
  531. +/* Default arch name */
  532. +const char *avr32_arch_name = "none";
  533. +const char *avr32_part_name = "none";
  534. +
  535. +const struct part_type_s *avr32_part;
  536. +const struct arch_type_s *avr32_arch;
  537. +
  538. +
  539. +/* FIXME: needs to use GC. */
  540. +struct flashvault_decl_list
  541. +{
  542. + struct flashvault_decl_list *next;
  543. + unsigned int vector_num;
  544. + const char *name;
  545. +};
  546. +
  547. +static struct flashvault_decl_list *flashvault_decl_list_head = NULL;
  548. +
  549. +
  550. +/* Set default target_flags. */
  551. +#undef TARGET_DEFAULT_TARGET_FLAGS
  552. +#define TARGET_DEFAULT_TARGET_FLAGS \
  553. + (MASK_HAS_ASM_ADDR_PSEUDOS | MASK_MD_REORG_OPTIMIZATION | MASK_COND_EXEC_BEFORE_RELOAD)
  554. +
  555. +void
  556. +avr32_optimization_options (int level, int size)
  557. +{
  558. + if (AVR32_ALWAYS_PIC)
  559. + flag_pic = 1;
  560. +
  561. + /* Enable section anchors if optimization is enabled. */
  562. + if (level > 0 || size)
  563. + flag_section_anchors = 2;
  564. +}
  565. +
  566. +
  567. +/* Override command line options */
  568. +void
  569. +avr32_override_options (void)
  570. +{
  571. + const struct part_type_s *part;
  572. + const struct arch_type_s *arch, *part_arch;
  573. +
  574. + /* Add backward compatibility */
  575. + if (strcmp ("uc", avr32_arch_name)== 0)
  576. + {
  577. + fprintf (stderr, "Warning: Deprecated arch `%s' specified. "
  578. + "Please use '-march=ucr1' instead. "
  579. + "Using arch 'ucr1'\n",
  580. + avr32_arch_name);
  581. + avr32_arch_name="ucr1";
  582. + }
  583. +
  584. + /* Check if arch type is set. */
  585. + for (arch = avr32_arch_types; arch->name; arch++)
  586. + {
  587. + if (strcmp (arch->name, avr32_arch_name) == 0)
  588. + break;
  589. + }
  590. + avr32_arch = arch;
  591. +
  592. + if (!arch->name && strcmp("none", avr32_arch_name) != 0)
  593. + {
  594. + fprintf (stderr, "Unknown arch `%s' specified\n"
  595. + "Known arch names:\n"
  596. + "\tuc (deprecated)\n",
  597. + avr32_arch_name);
  598. + for (arch = avr32_arch_types; arch->name; arch++)
  599. + fprintf (stderr, "\t%s\n", arch->name);
  600. + avr32_arch = &avr32_arch_types[ARCH_TYPE_AVR32_AP];
  601. + }
  602. +
  603. + /* Check if part type is set. */
  604. + for (part = avr32_part_types; part->name; part++)
  605. + if (strcmp (part->name, avr32_part_name) == 0)
  606. + break;
  607. +
  608. + avr32_part = part;
  609. + if (!part->name)
  610. + {
  611. + fprintf (stderr, "Unknown part `%s' specified\nKnown part names:\n",
  612. + avr32_part_name);
  613. + for (part = avr32_part_types; part->name; part++)
  614. + {
  615. + if (strcmp("none", part->name) != 0)
  616. + fprintf (stderr, "\t%s\n", part->name);
  617. + }
  618. + /* Set default to NONE*/
  619. + avr32_part = &avr32_part_types[PART_TYPE_AVR32_NONE];
  620. + }
  621. +
  622. + /* NB! option -march= overrides option -mpart
  623. + * if both are used at the same time */
  624. + if (!arch->name)
  625. + avr32_arch = &avr32_arch_types[avr32_part->arch_type];
  626. +
  627. + /* When architecture implied by -mpart and one passed in -march are
  628. + * conflicting, issue an error message */
  629. + part_arch = &avr32_arch_types[avr32_part->arch_type];
  630. + if (strcmp("none",avr32_part_name) && strcmp("none", avr32_arch_name) && strcmp(avr32_arch_name,part_arch->name))
  631. + error ("Conflicting architectures implied by -mpart and -march\n");
  632. +
  633. + /* If optimization level is two or greater, then align start of loops to a
  634. + word boundary since this will allow folding the first insn of the loop.
  635. + Do this only for targets supporting branch prediction. */
  636. + if (optimize >= 2 && TARGET_BRANCH_PRED)
  637. + align_loops = 2;
  638. +
  639. +
  640. + /* Enable fast-float library if unsafe math optimizations
  641. + are used. */
  642. + if (flag_unsafe_math_optimizations)
  643. + target_flags |= MASK_FAST_FLOAT;
  644. +
  645. + /* Check if we should set avr32_imm_in_const_pool
  646. + based on if caches are present or not. */
  647. + if ( avr32_imm_in_const_pool == -1 )
  648. + {
  649. + if ( TARGET_CACHES )
  650. + avr32_imm_in_const_pool = 1;
  651. + else
  652. + avr32_imm_in_const_pool = 0;
  653. + }
  654. +
  655. + if (TARGET_NO_PIC)
  656. + flag_pic = 0;
  657. + avr32_add_gc_roots ();
  658. +}
  659. +
  660. +
  661. +/*
  662. +If defined, a function that outputs the assembler code for entry to a
  663. +function. The prologue is responsible for setting up the stack frame,
  664. +initializing the frame pointer register, saving registers that must be
  665. +saved, and allocating size additional bytes of storage for the
  666. +local variables. size is an integer. file is a stdio
  667. +stream to which the assembler code should be output.
  668. +
  669. +The label for the beginning of the function need not be output by this
  670. +macro. That has already been done when the macro is run.
  671. +
  672. +To determine which registers to save, the macro can refer to the array
  673. +regs_ever_live: element r is nonzero if hard register
  674. +r is used anywhere within the function. This implies the function
  675. +prologue should save register r, provided it is not one of the
  676. +call-used registers. (TARGET_ASM_FUNCTION_EPILOGUE must likewise use
  677. +regs_ever_live.)
  678. +
  679. +On machines that have ``register windows'', the function entry code does
  680. +not save on the stack the registers that are in the windows, even if
  681. +they are supposed to be preserved by function calls; instead it takes
  682. +appropriate steps to ``push'' the register stack, if any non-call-used
  683. +registers are used in the function.
  684. +
  685. +On machines where functions may or may not have frame-pointers, the
  686. +function entry code must vary accordingly; it must set up the frame
  687. +pointer if one is wanted, and not otherwise. To determine whether a
  688. +frame pointer is wanted, the macro can refer to the variable
  689. +frame_pointer_needed. The variable's value will be 1 at run
  690. +time in a function that needs a frame pointer. (see Elimination).
  691. +
  692. +The function entry code is responsible for allocating any stack space
  693. +required for the function. This stack space consists of the regions
  694. +listed below. In most cases, these regions are allocated in the
  695. +order listed, with the last listed region closest to the top of the
  696. +stack (the lowest address if STACK_GROWS_DOWNWARD is defined, and
  697. +the highest address if it is not defined). You can use a different order
  698. +for a machine if doing so is more convenient or required for
  699. +compatibility reasons. Except in cases where required by standard
  700. +or by a debugger, there is no reason why the stack layout used by GCC
  701. +need agree with that used by other compilers for a machine.
  702. +*/
  703. +
  704. +#undef TARGET_ASM_FUNCTION_PROLOGUE
  705. +#define TARGET_ASM_FUNCTION_PROLOGUE avr32_target_asm_function_prologue
  706. +
  707. +#undef TARGET_ASM_FILE_END
  708. +#define TARGET_ASM_FILE_END avr32_file_end
  709. +
  710. +#undef TARGET_DEFAULT_SHORT_ENUMS
  711. +#define TARGET_DEFAULT_SHORT_ENUMS hook_bool_void_false
  712. +
  713. +#undef TARGET_PROMOTE_FUNCTION_ARGS
  714. +#define TARGET_PROMOTE_FUNCTION_ARGS hook_bool_tree_true
  715. +
  716. +#undef TARGET_PROMOTE_FUNCTION_RETURN
  717. +#define TARGET_PROMOTE_FUNCTION_RETURN hook_bool_tree_true
  718. +
  719. +#undef TARGET_PROMOTE_PROTOTYPES
  720. +#define TARGET_PROMOTE_PROTOTYPES hook_bool_tree_true
  721. +
  722. +#undef TARGET_MUST_PASS_IN_STACK
  723. +#define TARGET_MUST_PASS_IN_STACK avr32_must_pass_in_stack
  724. +
  725. +#undef TARGET_PASS_BY_REFERENCE
  726. +#define TARGET_PASS_BY_REFERENCE avr32_pass_by_reference
  727. +
  728. +#undef TARGET_STRICT_ARGUMENT_NAMING
  729. +#define TARGET_STRICT_ARGUMENT_NAMING avr32_strict_argument_naming
  730. +
  731. +#undef TARGET_VECTOR_MODE_SUPPORTED_P
  732. +#define TARGET_VECTOR_MODE_SUPPORTED_P avr32_vector_mode_supported
  733. +
  734. +#undef TARGET_RETURN_IN_MEMORY
  735. +#define TARGET_RETURN_IN_MEMORY avr32_return_in_memory
  736. +
  737. +#undef TARGET_RETURN_IN_MSB
  738. +#define TARGET_RETURN_IN_MSB avr32_return_in_msb
  739. +
  740. +#undef TARGET_ENCODE_SECTION_INFO
  741. +#define TARGET_ENCODE_SECTION_INFO avr32_encode_section_info
  742. +
  743. +#undef TARGET_ARG_PARTIAL_BYTES
  744. +#define TARGET_ARG_PARTIAL_BYTES avr32_arg_partial_bytes
  745. +
  746. +#undef TARGET_STRIP_NAME_ENCODING
  747. +#define TARGET_STRIP_NAME_ENCODING avr32_strip_name_encoding
  748. +
  749. +#define streq(string1, string2) (strcmp (string1, string2) == 0)
  750. +
  751. +#undef TARGET_NARROW_VOLATILE_BITFIELD
  752. +#define TARGET_NARROW_VOLATILE_BITFIELD hook_bool_void_false
  753. +
  754. +#undef TARGET_ATTRIBUTE_TABLE
  755. +#define TARGET_ATTRIBUTE_TABLE avr32_attribute_table
  756. +
  757. +#undef TARGET_COMP_TYPE_ATTRIBUTES
  758. +#define TARGET_COMP_TYPE_ATTRIBUTES avr32_comp_type_attributes
  759. +
  760. +
  761. +#undef TARGET_RTX_COSTS
  762. +#define TARGET_RTX_COSTS avr32_rtx_costs
  763. +
  764. +#undef TARGET_CANNOT_FORCE_CONST_MEM
  765. +#define TARGET_CANNOT_FORCE_CONST_MEM avr32_cannot_force_const_mem
  766. +
  767. +#undef TARGET_ASM_INTEGER
  768. +#define TARGET_ASM_INTEGER avr32_assemble_integer
  769. +
  770. +#undef TARGET_FUNCTION_VALUE
  771. +#define TARGET_FUNCTION_VALUE avr32_function_value
  772. +
  773. +#undef TARGET_MIN_ANCHOR_OFFSET
  774. +#define TARGET_MIN_ANCHOR_OFFSET (0)
  775. +
  776. +#undef TARGET_MAX_ANCHOR_OFFSET
  777. +#define TARGET_MAX_ANCHOR_OFFSET ((1 << 15) - 1)
  778. +#undef TARGET_SECONDARY_RELOAD
  779. +#define TARGET_SECONDARY_RELOAD avr32_secondary_reload
  780. +
  781. +
  782. +/*
  783. + * Defining the option, -mlist-devices to list the devices supported by gcc.
  784. + * This option should be used while printing target-help to list all the
  785. + * supported devices.
  786. + */
  787. +#undef TARGET_HELP
  788. +#define TARGET_HELP avr32_target_help
  789. +
  790. +void avr32_target_help ()
  791. +{
  792. + if (avr32_list_supported_parts)
  793. + {
  794. + const struct part_type_s *list;
  795. + fprintf (stdout, "List of parts supported by avr32-gcc:\n");
  796. + for (list = avr32_part_types; list->name; list++)
  797. + {
  798. + if (strcmp("none", list->name) != 0)
  799. + fprintf (stdout, "%-20s%s\n", list->name, list->macro);
  800. + }
  801. + fprintf (stdout, "\n\n");
  802. + }
  803. +}
  804. +
  805. +enum reg_class
  806. +avr32_secondary_reload (bool in_p, rtx x, enum reg_class class,
  807. + enum machine_mode mode, secondary_reload_info *sri)
  808. +{
  809. +
  810. + if ( avr32_rmw_memory_operand (x, mode) )
  811. + {
  812. + if (!in_p)
  813. + sri->icode = CODE_FOR_reload_out_rmw_memory_operand;
  814. + else
  815. + sri->icode = CODE_FOR_reload_in_rmw_memory_operand;
  816. + }
  817. + return NO_REGS;
  818. +
  819. +}
  820. +/*
  821. + * Switches to the appropriate section for output of constant pool
  822. + * entry x in mode. You can assume that x is some kind of constant in
  823. + * RTL. The argument mode is redundant except in the case of a
  824. + * const_int rtx. Select the section by calling readonly_data_section
  825. + * or one of the alternatives for other sections. align is the
  826. + * constant alignment in bits.
  827. + *
  828. + * The default version of this function takes care of putting symbolic
  829. + * constants in flag_pic mode in data_section and everything else in
  830. + * readonly_data_section.
  831. + */
  832. +//#undef TARGET_ASM_SELECT_RTX_SECTION
  833. +//#define TARGET_ASM_SELECT_RTX_SECTION avr32_select_rtx_section
  834. +
  835. +
  836. +/*
  837. + * If non-null, this hook performs a target-specific pass over the
  838. + * instruction stream. The compiler will run it at all optimization
  839. + * levels, just before the point at which it normally does
  840. + * delayed-branch scheduling.
  841. + *
  842. + * The exact purpose of the hook varies from target to target. Some
  843. + * use it to do transformations that are necessary for correctness,
  844. + * such as laying out in-function constant pools or avoiding hardware
  845. + * hazards. Others use it as an opportunity to do some
  846. + * machine-dependent optimizations.
  847. + *
  848. + * You need not implement the hook if it has nothing to do. The
  849. + * default definition is null.
  850. + */
  851. +#undef TARGET_MACHINE_DEPENDENT_REORG
  852. +#define TARGET_MACHINE_DEPENDENT_REORG avr32_reorg
  853. +
  854. +/* Target hook for assembling integer objects.
  855. + Need to handle integer vectors */
  856. +static bool
  857. +avr32_assemble_integer (rtx x, unsigned int size, int aligned_p)
  858. +{
  859. + if (avr32_vector_mode_supported (GET_MODE (x)))
  860. + {
  861. + int i, units;
  862. +
  863. + if (GET_CODE (x) != CONST_VECTOR)
  864. + abort ();
  865. +
  866. + units = CONST_VECTOR_NUNITS (x);
  867. +
  868. + switch (GET_MODE (x))
  869. + {
  870. + case V2HImode:
  871. + size = 2;
  872. + break;
  873. + case V4QImode:
  874. + size = 1;
  875. + break;
  876. + default:
  877. + abort ();
  878. + }
  879. +
  880. + for (i = 0; i < units; i++)
  881. + {
  882. + rtx elt;
  883. +
  884. + elt = CONST_VECTOR_ELT (x, i);
  885. + assemble_integer (elt, size, i == 0 ? 32 : size * BITS_PER_UNIT, 1);
  886. + }
  887. +
  888. + return true;
  889. + }
  890. +
  891. + return default_assemble_integer (x, size, aligned_p);
  892. +}
  893. +
  894. +
  895. +/*
  896. + * This target hook describes the relative costs of RTL expressions.
  897. + *
  898. + * The cost may depend on the precise form of the expression, which is
  899. + * available for examination in x, and the rtx code of the expression
  900. + * in which it is contained, found in outer_code. code is the
  901. + * expression code--redundant, since it can be obtained with GET_CODE
  902. + * (x).
  903. + *
  904. + * In implementing this hook, you can use the construct COSTS_N_INSNS
  905. + * (n) to specify a cost equal to n fast instructions.
  906. + *
  907. + * On entry to the hook, *total contains a default estimate for the
  908. + * cost of the expression. The hook should modify this value as
  909. + * necessary. Traditionally, the default costs are COSTS_N_INSNS (5)
  910. + * for multiplications, COSTS_N_INSNS (7) for division and modulus
  911. + * operations, and COSTS_N_INSNS (1) for all other operations.
  912. + *
  913. + * When optimizing for code size, i.e. when optimize_size is non-zero,
  914. + * this target hook should be used to estimate the relative size cost
  915. + * of an expression, again relative to COSTS_N_INSNS.
  916. + *
  917. + * The hook returns true when all subexpressions of x have been
  918. + * processed, and false when rtx_cost should recurse.
  919. + */
  920. +
  921. +/* Worker routine for avr32_rtx_costs. */
  922. +static inline int
  923. +avr32_rtx_costs_1 (rtx x, enum rtx_code code ATTRIBUTE_UNUSED,
  924. + enum rtx_code outer ATTRIBUTE_UNUSED)
  925. +{
  926. + enum machine_mode mode = GET_MODE (x);
  927. +
  928. + switch (GET_CODE (x))
  929. + {
  930. + case MEM:
  931. + /* Using pre decrement / post increment memory operations on the
  932. + avr32_uc architecture means that two writebacks must be performed
  933. + and hence two cycles are needed. */
  934. + if (!optimize_size
  935. + && GET_MODE_SIZE (mode) <= 2 * UNITS_PER_WORD
  936. + && TARGET_ARCH_UC
  937. + && (GET_CODE (XEXP (x, 0)) == PRE_DEC
  938. + || GET_CODE (XEXP (x, 0)) == POST_INC))
  939. + return COSTS_N_INSNS (5);
  940. +
  941. + /* Memory costs quite a lot for the first word, but subsequent words
  942. + load at the equivalent of a single insn each. */
  943. + if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
  944. + return COSTS_N_INSNS (3 + (GET_MODE_SIZE (mode) / UNITS_PER_WORD));
  945. +
  946. + return COSTS_N_INSNS (4);
  947. + case SYMBOL_REF:
  948. + case CONST:
  949. + /* These are valid for the pseudo insns: lda.w and call, which operate
  950. + on direct addresses. We assume that the cost of a lda.w is the same
  951. + as the cost of a ld.w insn. */
  952. + return (outer == SET) ? COSTS_N_INSNS (4) : COSTS_N_INSNS (1);
  953. + case DIV:
  954. + case MOD:
  955. + case UDIV:
  956. + case UMOD:
  957. + return optimize_size ? COSTS_N_INSNS (1) : COSTS_N_INSNS (16);
  958. +
  959. + case ROTATE:
  960. + case ROTATERT:
  961. + if (mode == TImode)
  962. + return COSTS_N_INSNS (100);
  963. +
  964. + if (mode == DImode)
  965. + return COSTS_N_INSNS (10);
  966. + return COSTS_N_INSNS (4);
  967. + case ASHIFT:
  968. + case LSHIFTRT:
  969. + case ASHIFTRT:
  970. + case NOT:
  971. + if (mode == TImode)
  972. + return COSTS_N_INSNS (10);
  973. +
  974. + if (mode == DImode)
  975. + return COSTS_N_INSNS (4);
  976. + return COSTS_N_INSNS (1);
  977. + case PLUS:
  978. + case MINUS:
  979. + case NEG:
  980. + case COMPARE:
  981. + case ABS:
  982. + if (GET_MODE_CLASS (mode) == MODE_FLOAT)
  983. + return COSTS_N_INSNS (100);
  984. +
  985. + if (mode == TImode)
  986. + return COSTS_N_INSNS (50);
  987. +
  988. + if (mode == DImode)
  989. + return COSTS_N_INSNS (2);
  990. + return COSTS_N_INSNS (1);
  991. +
  992. + case MULT:
  993. + {
  994. + if (GET_MODE_CLASS (mode) == MODE_FLOAT)
  995. + return COSTS_N_INSNS (300);
  996. +
  997. + if (mode == TImode)
  998. + return COSTS_N_INSNS (16);
  999. +
  1000. + if (mode == DImode)
  1001. + return COSTS_N_INSNS (4);
  1002. +
  1003. + if (mode == HImode)
  1004. + return COSTS_N_INSNS (2);
  1005. +
  1006. + return COSTS_N_INSNS (3);
  1007. + }
  1008. + case IF_THEN_ELSE:
  1009. + if (GET_CODE (XEXP (x, 1)) == PC || GET_CODE (XEXP (x, 2)) == PC)
  1010. + return COSTS_N_INSNS (4);
  1011. + return COSTS_N_INSNS (1);
  1012. + case SIGN_EXTEND:
  1013. + case ZERO_EXTEND:
  1014. + /* Sign/Zero extensions of registers cost quite a lot since these
  1015. + instructions only take one register operand, which means that gcc
  1016. + often must insert some move instructions */
  1017. + if (mode == QImode || mode == HImode)
  1018. + return (COSTS_N_INSNS (GET_CODE (XEXP (x, 0)) == MEM ? 0 : 1));
  1019. + return COSTS_N_INSNS (4);
  1020. + case UNSPEC:
  1021. + /* divmod operations */
  1022. + if (XINT (x, 1) == UNSPEC_UDIVMODSI4_INTERNAL
  1023. + || XINT (x, 1) == UNSPEC_DIVMODSI4_INTERNAL)
  1024. + {
  1025. + return optimize_size ? COSTS_N_INSNS (1) : COSTS_N_INSNS (16);
  1026. + }
  1027. + /* Fallthrough */
  1028. + default:
  1029. + return COSTS_N_INSNS (1);
  1030. + }
  1031. +}
  1032. +
  1033. +
  1034. +static bool
  1035. +avr32_rtx_costs (rtx x, int code, int outer_code, int *total)
  1036. +{
  1037. + *total = avr32_rtx_costs_1 (x, code, outer_code);
  1038. + return true;
  1039. +}
  1040. +
  1041. +
  1042. +bool
  1043. +avr32_cannot_force_const_mem (rtx x ATTRIBUTE_UNUSED)
  1044. +{
  1045. + /* Do not want symbols in the constant pool when compiling pic or if using
  1046. + address pseudo instructions. */
  1047. + return ((flag_pic || TARGET_HAS_ASM_ADDR_PSEUDOS)
  1048. + && avr32_find_symbol (x) != NULL_RTX);
  1049. +}
  1050. +
  1051. +
  1052. +/* Table of machine attributes. */
  1053. +const struct attribute_spec avr32_attribute_table[] = {
  1054. + /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  1055. + /* Interrupt Service Routines have special prologue and epilogue
  1056. + requirements. */
  1057. + {"isr", 0, 1, false, false, false, avr32_handle_isr_attribute},
  1058. + {"interrupt", 0, 1, false, false, false, avr32_handle_isr_attribute},
  1059. + {"acall", 0, 1, false, true, true, avr32_handle_acall_attribute},
  1060. + {"naked", 0, 0, true, false, false, avr32_handle_fndecl_attribute},
  1061. + {"rmw_addressable", 0, 0, true, false, false, NULL},
  1062. + {"flashvault", 0, 1, true, false, false, avr32_handle_fndecl_attribute},
  1063. + {"flashvault_impl", 0, 1, true, false, false, avr32_handle_fndecl_attribute},
  1064. + {NULL, 0, 0, false, false, false, NULL}
  1065. +};
  1066. +
  1067. +
  1068. +typedef struct
  1069. +{
  1070. + const char *const arg;
  1071. + const unsigned long return_value;
  1072. +}
  1073. +isr_attribute_arg;
  1074. +
  1075. +
  1076. +static const isr_attribute_arg isr_attribute_args[] = {
  1077. + {"FULL", AVR32_FT_ISR_FULL},
  1078. + {"full", AVR32_FT_ISR_FULL},
  1079. + {"HALF", AVR32_FT_ISR_HALF},
  1080. + {"half", AVR32_FT_ISR_HALF},
  1081. + {"NONE", AVR32_FT_ISR_NONE},
  1082. + {"none", AVR32_FT_ISR_NONE},
  1083. + {"UNDEF", AVR32_FT_ISR_NONE},
  1084. + {"undef", AVR32_FT_ISR_NONE},
  1085. + {"SWI", AVR32_FT_ISR_NONE},
  1086. + {"swi", AVR32_FT_ISR_NONE},
  1087. + {NULL, AVR32_FT_ISR_NONE}
  1088. +};
  1089. +
  1090. +
  1091. +/* Returns the (interrupt) function type of the current
  1092. + function, or AVR32_FT_UNKNOWN if the type cannot be determined. */
  1093. +static unsigned long
  1094. +avr32_isr_value (tree argument)
  1095. +{
  1096. + const isr_attribute_arg *ptr;
  1097. + const char *arg;
  1098. +
  1099. + /* No argument - default to ISR_NONE. */
  1100. + if (argument == NULL_TREE)
  1101. + return AVR32_FT_ISR_NONE;
  1102. +
  1103. + /* Get the value of the argument. */
  1104. + if (TREE_VALUE (argument) == NULL_TREE
  1105. + || TREE_CODE (TREE_VALUE (argument)) != STRING_CST)
  1106. + return AVR32_FT_UNKNOWN;
  1107. +
  1108. + arg = TREE_STRING_POINTER (TREE_VALUE (argument));
  1109. +
  1110. + /* Check it against the list of known arguments. */
  1111. + for (ptr = isr_attribute_args; ptr->arg != NULL; ptr++)
  1112. + if (streq (arg, ptr->arg))
  1113. + return ptr->return_value;
  1114. +
  1115. + /* An unrecognized interrupt type. */
  1116. + return AVR32_FT_UNKNOWN;
  1117. +}
  1118. +
  1119. +
  1120. +/*
  1121. +These hooks specify assembly directives for creating certain kinds
  1122. +of integer object. The TARGET_ASM_BYTE_OP directive creates a
  1123. +byte-sized object, the TARGET_ASM_ALIGNED_HI_OP one creates an
  1124. +aligned two-byte object, and so on. Any of the hooks may be
  1125. +NULL, indicating that no suitable directive is available.
  1126. +
  1127. +The compiler will print these strings at the start of a new line,
  1128. +followed immediately by the object's initial value. In most cases,
  1129. +the string should contain a tab, a pseudo-op, and then another tab.
  1130. +*/
  1131. +#undef TARGET_ASM_BYTE_OP
  1132. +#define TARGET_ASM_BYTE_OP "\t.byte\t"
  1133. +#undef TARGET_ASM_ALIGNED_HI_OP
  1134. +#define TARGET_ASM_ALIGNED_HI_OP "\t.align 1\n\t.short\t"
  1135. +#undef TARGET_ASM_ALIGNED_SI_OP
  1136. +#define TARGET_ASM_ALIGNED_SI_OP "\t.align 2\n\t.int\t"
  1137. +#undef TARGET_ASM_ALIGNED_DI_OP
  1138. +#define TARGET_ASM_ALIGNED_DI_OP NULL
  1139. +#undef TARGET_ASM_ALIGNED_TI_OP
  1140. +#define TARGET_ASM_ALIGNED_TI_OP NULL
  1141. +#undef TARGET_ASM_UNALIGNED_HI_OP
  1142. +#define TARGET_ASM_UNALIGNED_HI_OP "\t.short\t"
  1143. +#undef TARGET_ASM_UNALIGNED_SI_OP
  1144. +#define TARGET_ASM_UNALIGNED_SI_OP "\t.int\t"
  1145. +#undef TARGET_ASM_UNALIGNED_DI_OP
  1146. +#define TARGET_ASM_UNALIGNED_DI_OP NULL
  1147. +#undef TARGET_ASM_UNALIGNED_TI_OP
  1148. +#define TARGET_ASM_UNALIGNED_TI_OP NULL
  1149. +
  1150. +#undef TARGET_ASM_OUTPUT_MI_THUNK
  1151. +#define TARGET_ASM_OUTPUT_MI_THUNK avr32_output_mi_thunk
  1152. +
  1153. +#undef TARGET_ASM_CAN_OUTPUT_MI_THUNK
  1154. +#define TARGET_ASM_CAN_OUTPUT_MI_THUNK hook_bool_const_tree_hwi_hwi_const_tree_true
  1155. +
  1156. +
  1157. +static void
  1158. +avr32_output_mi_thunk (FILE * file,
  1159. + tree thunk ATTRIBUTE_UNUSED,
  1160. + HOST_WIDE_INT delta,
  1161. + HOST_WIDE_INT vcall_offset, tree function)
  1162. + {
  1163. + int mi_delta = delta;
  1164. + int this_regno =
  1165. + (aggregate_value_p (TREE_TYPE (TREE_TYPE (function)), function) ?
  1166. + INTERNAL_REGNUM (11) : INTERNAL_REGNUM (12));
  1167. +
  1168. +
  1169. + if (!avr32_const_ok_for_constraint_p (mi_delta, 'I', "Is21")
  1170. + || vcall_offset)
  1171. + {
  1172. + fputs ("\tpushm\tlr\n", file);
  1173. + }
  1174. +
  1175. +
  1176. + if (mi_delta != 0)
  1177. + {
  1178. + if (avr32_const_ok_for_constraint_p (mi_delta, 'I', "Is21"))
  1179. + {
  1180. + fprintf (file, "\tsub\t%s, %d\n", reg_names[this_regno], -mi_delta);
  1181. + }
  1182. + else
  1183. + {
  1184. + /* Immediate is larger than k21; we must make a temp register by
  1185. + pushing a register to the stack. */
  1186. + fprintf (file, "\tmov\tlr, lo(%d)\n", mi_delta);
  1187. + fprintf (file, "\torh\tlr, hi(%d)\n", mi_delta);
  1188. + fprintf (file, "\tadd\t%s, lr\n", reg_names[this_regno]);
  1189. + }
  1190. + }
  1191. +
  1192. +
  1193. + if (vcall_offset != 0)
  1194. + {
  1195. + fprintf (file, "\tld.w\tlr, %s[0]\n", reg_names[this_regno]);
  1196. + fprintf (file, "\tld.w\tlr, lr[%i]\n", (int) vcall_offset);
  1197. + fprintf (file, "\tadd\t%s, lr\n", reg_names[this_regno]);
  1198. + }
  1199. +
  1200. +
  1201. + if (!avr32_const_ok_for_constraint_p (mi_delta, 'I', "Is21")
  1202. + || vcall_offset)
  1203. + {
  1204. + fputs ("\tpopm\tlr\n", file);
  1205. + }
  1206. +
  1207. + /* Jump to the function. We assume that we can use an rjmp since the
  1208. + function to jump to is local and probably not too far away from
  1209. + the thunk. If this assumption proves to be wrong we could implement
  1210. + this jump by calculating the offset between the jump source and destination
  1211. + and put this in the constant pool and then perform an add to pc.
  1212. + This would also be legitimate PIC code. But for now we hope that an rjmp
  1213. + will be sufficient...
  1214. + */
  1215. + fputs ("\trjmp\t", file);
  1216. + assemble_name (file, XSTR (XEXP (DECL_RTL (function), 0), 0));
  1217. + fputc ('\n', file);
  1218. + }
  1219. +
  1220. +
  1221. +/* Implements target hook vector_mode_supported. */
  1222. +bool
  1223. +avr32_vector_mode_supported (enum machine_mode mode)
  1224. +{
  1225. + if ((mode == V2HImode) || (mode == V4QImode))
  1226. + return true;
  1227. +
  1228. + return false;
  1229. +}
  1230. +
  1231. +
  1232. +#undef TARGET_INIT_LIBFUNCS
  1233. +#define TARGET_INIT_LIBFUNCS avr32_init_libfuncs
  1234. +
  1235. +#undef TARGET_INIT_BUILTINS
  1236. +#define TARGET_INIT_BUILTINS avr32_init_builtins
  1237. +
  1238. +#undef TARGET_EXPAND_BUILTIN
  1239. +#define TARGET_EXPAND_BUILTIN avr32_expand_builtin
  1240. +
  1241. +tree int_ftype_int, int_ftype_void, short_ftype_short, void_ftype_int_int,
  1242. + void_ftype_ptr_int;
  1243. +tree void_ftype_int, void_ftype_ulong, void_ftype_void, int_ftype_ptr_int;
  1244. +tree short_ftype_short, int_ftype_int_short, int_ftype_short_short,
  1245. + short_ftype_short_short;
  1246. +tree int_ftype_int_int, longlong_ftype_int_short, longlong_ftype_short_short;
  1247. +tree void_ftype_int_int_int_int_int, void_ftype_int_int_int;
  1248. +tree longlong_ftype_int_int, void_ftype_int_int_longlong;
  1249. +tree int_ftype_int_int_int, longlong_ftype_longlong_int_short;
  1250. +tree longlong_ftype_longlong_short_short, int_ftype_int_short_short;
  1251. +
  1252. +#define def_builtin(NAME, TYPE, CODE) \
  1253. + add_builtin_function ((NAME), (TYPE), (CODE), \
  1254. + BUILT_IN_MD, NULL, NULL_TREE)
  1255. +
  1256. +#define def_mbuiltin(MASK, NAME, TYPE, CODE) \
  1257. + do \
  1258. + { \
  1259. + if ((MASK)) \
  1260. + add_builtin_function ((NAME), (TYPE), (CODE), \
  1261. + BUILT_IN_MD, NULL, NULL_TREE); \
  1262. + } \
  1263. + while (0)
  1264. +
  1265. +struct builtin_description
  1266. +{
  1267. + const unsigned int mask;
  1268. + const enum insn_code icode;
  1269. + const char *const name;
  1270. + const int code;
  1271. + const enum rtx_code comparison;
  1272. + const unsigned int flag;
  1273. + const tree *ftype;
  1274. +};
  1275. +
  1276. +static const struct builtin_description bdesc_2arg[] = {
  1277. +
  1278. +#define DSP_BUILTIN(code, builtin, ftype) \
  1279. + { 1, CODE_FOR_##code, "__builtin_" #code , \
  1280. + AVR32_BUILTIN_##builtin, 0, 0, ftype }
  1281. +
  1282. + DSP_BUILTIN (mulsathh_h, MULSATHH_H, &short_ftype_short_short),
  1283. + DSP_BUILTIN (mulsathh_w, MULSATHH_W, &int_ftype_short_short),
  1284. + DSP_BUILTIN (mulsatrndhh_h, MULSATRNDHH_H, &short_ftype_short_short),
  1285. + DSP_BUILTIN (mulsatrndwh_w, MULSATRNDWH_W, &int_ftype_int_short),
  1286. + DSP_BUILTIN (mulsatwh_w, MULSATWH_W, &int_ftype_int_short),
  1287. + DSP_BUILTIN (satadd_h, SATADD_H, &short_ftype_short_short),
  1288. + DSP_BUILTIN (satsub_h, SATSUB_H, &short_ftype_short_short),
  1289. + DSP_BUILTIN (satadd_w, SATADD_W, &int_ftype_int_int),
  1290. + DSP_BUILTIN (satsub_w, SATSUB_W, &int_ftype_int_int),
  1291. + DSP_BUILTIN (mulwh_d, MULWH_D, &longlong_ftype_int_short),
  1292. + DSP_BUILTIN (mulnwh_d, MULNWH_D, &longlong_ftype_int_short)
  1293. +};
  1294. +
  1295. +
  1296. +void
  1297. +avr32_init_builtins (void)
  1298. +{
  1299. + unsigned int i;
  1300. + const struct builtin_description *d;
  1301. + tree endlink = void_list_node;
  1302. + tree int_endlink = tree_cons (NULL_TREE, integer_type_node, endlink);
  1303. + tree longlong_endlink =
  1304. + tree_cons (NULL_TREE, long_long_integer_type_node, endlink);
  1305. + tree short_endlink =
  1306. + tree_cons (NULL_TREE, short_integer_type_node, endlink);
  1307. + tree void_endlink = tree_cons (NULL_TREE, void_type_node, endlink);
  1308. +
  1309. + /* int func (int) */
  1310. + int_ftype_int = build_function_type (integer_type_node, int_endlink);
  1311. +
  1312. + /* short func (short) */
  1313. + short_ftype_short
  1314. + = build_function_type (short_integer_type_node, short_endlink);
  1315. +
  1316. + /* short func (short, short) */
  1317. + short_ftype_short_short
  1318. + = build_function_type (short_integer_type_node,
  1319. + tree_cons (NULL_TREE, short_integer_type_node,
  1320. + short_endlink));
  1321. +
  1322. + /* long long func (long long, short, short) */
  1323. + longlong_ftype_longlong_short_short
  1324. + = build_function_type (long_long_integer_type_node,
  1325. + tree_cons (NULL_TREE, long_long_integer_type_node,
  1326. + tree_cons (NULL_TREE,
  1327. + short_integer_type_node,
  1328. + short_endlink)));
  1329. +
  1330. + /* long long func (short, short) */
  1331. + longlong_ftype_short_short
  1332. + = build_function_type (long_long_integer_type_node,
  1333. + tree_cons (NULL_TREE, short_integer_type_node,
  1334. + short_endlink));
  1335. +
  1336. + /* int func (int, int) */
  1337. + int_ftype_int_int
  1338. + = build_function_type (integer_type_node,
  1339. + tree_cons (NULL_TREE, integer_type_node,
  1340. + int_endlink));
  1341. +
  1342. + /* long long func (int, int) */
  1343. + longlong_ftype_int_int
  1344. + = build_function_type (long_long_integer_type_node,
  1345. + tree_cons (NULL_TREE, integer_type_node,
  1346. + int_endlink));
  1347. +
  1348. + /* long long int func (long long, int, short) */
  1349. + longlong_ftype_longlong_int_short
  1350. + = build_function_type (long_long_integer_type_node,
  1351. + tree_cons (NULL_TREE, long_long_integer_type_node,
  1352. + tree_cons (NULL_TREE, integer_type_node,
  1353. + short_endlink)));
  1354. +
  1355. + /* long long int func (int, short) */
  1356. + longlong_ftype_int_short
  1357. + = build_function_type (long_long_integer_type_node,
  1358. + tree_cons (NULL_TREE, integer_type_node,
  1359. + short_endlink));
  1360. +
  1361. + /* int func (int, short, short) */
  1362. + int_ftype_int_short_short
  1363. + = build_function_type (integer_type_node,
  1364. + tree_cons (NULL_TREE, integer_type_node,
  1365. + tree_cons (NULL_TREE,
  1366. + short_integer_type_node,
  1367. + short_endlink)));
  1368. +
  1369. + /* int func (short, short) */
  1370. + int_ftype_short_short
  1371. + = build_function_type (integer_type_node,
  1372. + tree_cons (NULL_TREE, short_integer_type_node,
  1373. + short_endlink));
  1374. +
  1375. + /* int func (int, short) */
  1376. + int_ftype_int_short
  1377. + = build_function_type (integer_type_node,
  1378. + tree_cons (NULL_TREE, integer_type_node,
  1379. + short_endlink));
  1380. +
  1381. + /* void func (int, int) */
  1382. + void_ftype_int_int
  1383. + = build_function_type (void_type_node,
  1384. + tree_cons (NULL_TREE, integer_type_node,
  1385. + int_endlink));
  1386. +
  1387. + /* void func (int, int, int) */
  1388. + void_ftype_int_int_int
  1389. + = build_function_type (void_type_node,
  1390. + tree_cons (NULL_TREE, integer_type_node,
  1391. + tree_cons (NULL_TREE, integer_type_node,
  1392. + int_endlink)));
  1393. +
  1394. + /* void func (int, int, long long) */
  1395. + void_ftype_int_int_longlong
  1396. + = build_function_type (void_type_node,
  1397. + tree_cons (NULL_TREE, integer_type_node,
  1398. + tree_cons (NULL_TREE, integer_type_node,
  1399. + longlong_endlink)));
  1400. +
  1401. + /* void func (int, int, int, int, int) */
  1402. + void_ftype_int_int_int_int_int
  1403. + = build_function_type (void_type_node,
  1404. + tree_cons (NULL_TREE, integer_type_node,
  1405. + tree_cons (NULL_TREE, integer_type_node,
  1406. + tree_cons (NULL_TREE,
  1407. + integer_type_node,
  1408. + tree_cons
  1409. + (NULL_TREE,
  1410. + integer_type_node,
  1411. + int_endlink)))));
  1412. +
  1413. + /* void func (void *, int) */
  1414. + void_ftype_ptr_int
  1415. + = build_function_type (void_type_node,
  1416. + tree_cons (NULL_TREE, ptr_type_node, int_endlink));
  1417. +
  1418. + /* void func (int) */
  1419. + void_ftype_int = build_function_type (void_type_node, int_endlink);
  1420. +
  1421. + /* void func (ulong) */
  1422. + void_ftype_ulong = build_function_type_list (void_type_node,
  1423. + long_unsigned_type_node, NULL_TREE);
  1424. +
  1425. + /* void func (void) */
  1426. + void_ftype_void = build_function_type (void_type_node, void_endlink);
  1427. +
  1428. + /* int func (void) */
  1429. + int_ftype_void = build_function_type (integer_type_node, void_endlink);
  1430. +
  1431. + /* int func (void *, int) */
  1432. + int_ftype_ptr_int
  1433. + = build_function_type (integer_type_node,
  1434. + tree_cons (NULL_TREE, ptr_type_node, int_endlink));
  1435. +
  1436. + /* int func (int, int, int) */
  1437. + int_ftype_int_int_int
  1438. + = build_function_type (integer_type_node,
  1439. + tree_cons (NULL_TREE, integer_type_node,
  1440. + tree_cons (NULL_TREE, integer_type_node,
  1441. + int_endlink)));
  1442. +
  1443. + /* Initialize avr32 builtins. */
  1444. + def_builtin ("__builtin_mfsr", int_ftype_int, AVR32_BUILTIN_MFSR);
  1445. + def_builtin ("__builtin_mtsr", void_ftype_int_int, AVR32_BUILTIN_MTSR);
  1446. + def_builtin ("__builtin_mfdr", int_ftype_int, AVR32_BUILTIN_MFDR);
  1447. + def_builtin ("__builtin_mtdr", void_ftype_int_int, AVR32_BUILTIN_MTDR);
  1448. + def_builtin ("__builtin_cache", void_ftype_ptr_int, AVR32_BUILTIN_CACHE);
  1449. + def_builtin ("__builtin_sync", void_ftype_int, AVR32_BUILTIN_SYNC);
  1450. + def_builtin ("__builtin_ssrf", void_ftype_int, AVR32_BUILTIN_SSRF);
  1451. + def_builtin ("__builtin_csrf", void_ftype_int, AVR32_BUILTIN_CSRF);
  1452. + def_builtin ("__builtin_tlbr", void_ftype_void, AVR32_BUILTIN_TLBR);
  1453. + def_builtin ("__builtin_tlbs", void_ftype_void, AVR32_BUILTIN_TLBS);
  1454. + def_builtin ("__builtin_tlbw", void_ftype_void, AVR32_BUILTIN_TLBW);
  1455. + def_builtin ("__builtin_breakpoint", void_ftype_void,
  1456. + AVR32_BUILTIN_BREAKPOINT);
  1457. + def_builtin ("__builtin_xchg", int_ftype_ptr_int, AVR32_BUILTIN_XCHG);
  1458. + def_builtin ("__builtin_ldxi", int_ftype_ptr_int, AVR32_BUILTIN_LDXI);
  1459. + def_builtin ("__builtin_bswap_16", short_ftype_short,
  1460. + AVR32_BUILTIN_BSWAP16);
  1461. + def_builtin ("__builtin_bswap_32", int_ftype_int, AVR32_BUILTIN_BSWAP32);
  1462. + def_builtin ("__builtin_cop", void_ftype_int_int_int_int_int,
  1463. + AVR32_BUILTIN_COP);
  1464. + def_builtin ("__builtin_mvcr_w", int_ftype_int_int, AVR32_BUILTIN_MVCR_W);
  1465. + def_builtin ("__builtin_mvrc_w", void_ftype_int_int_int,
  1466. + AVR32_BUILTIN_MVRC_W);
  1467. + def_builtin ("__builtin_mvcr_d", longlong_ftype_int_int,
  1468. + AVR32_BUILTIN_MVCR_D);
  1469. + def_builtin ("__builtin_mvrc_d", void_ftype_int_int_longlong,
  1470. + AVR32_BUILTIN_MVRC_D);
  1471. + def_builtin ("__builtin_sats", int_ftype_int_int_int, AVR32_BUILTIN_SATS);
  1472. + def_builtin ("__builtin_satu", int_ftype_int_int_int, AVR32_BUILTIN_SATU);
  1473. + def_builtin ("__builtin_satrnds", int_ftype_int_int_int,
  1474. + AVR32_BUILTIN_SATRNDS);
  1475. + def_builtin ("__builtin_satrndu", int_ftype_int_int_int,
  1476. + AVR32_BUILTIN_SATRNDU);
  1477. + def_builtin ("__builtin_musfr", void_ftype_int, AVR32_BUILTIN_MUSFR);
  1478. + def_builtin ("__builtin_mustr", int_ftype_void, AVR32_BUILTIN_MUSTR);
  1479. + def_builtin ("__builtin_macsathh_w", int_ftype_int_short_short,
  1480. + AVR32_BUILTIN_MACSATHH_W);
  1481. + def_builtin ("__builtin_macwh_d", longlong_ftype_longlong_int_short,
  1482. + AVR32_BUILTIN_MACWH_D);
  1483. + def_builtin ("__builtin_machh_d", longlong_ftype_longlong_short_short,
  1484. + AVR32_BUILTIN_MACHH_D);
  1485. + def_builtin ("__builtin_mems", void_ftype_ptr_int, AVR32_BUILTIN_MEMS);
  1486. + def_builtin ("__builtin_memt", void_ftype_ptr_int, AVR32_BUILTIN_MEMT);
  1487. + def_builtin ("__builtin_memc", void_ftype_ptr_int, AVR32_BUILTIN_MEMC);
  1488. + def_builtin ("__builtin_sleep", void_ftype_int, AVR32_BUILTIN_SLEEP);
  1489. + def_builtin ("__builtin_avr32_delay_cycles", void_ftype_int, AVR32_BUILTIN_DELAY_CYCLES);
  1490. +
  1491. + /* Add all builtins that are more or less simple operations on two
  1492. + operands. */
  1493. + for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
  1494. + {
  1495. + /* Use one of the operands; the target can have a different mode for
  1496. + mask-generating compares. */
  1497. +
  1498. + if (d->name == 0)
  1499. + continue;
  1500. +
  1501. + def_mbuiltin (d->mask, d->name, *(d->ftype), d->code);
  1502. + }
  1503. +}
  1504. +
  1505. +
  1506. +/* Subroutine of avr32_expand_builtin to take care of binop insns. */
  1507. +static rtx
  1508. +avr32_expand_binop_builtin (enum insn_code icode, tree exp, rtx target)
  1509. +{
  1510. + rtx pat;
  1511. + tree arg0 = CALL_EXPR_ARG (exp,0);
  1512. + tree arg1 = CALL_EXPR_ARG (exp,1);
  1513. + rtx op0 = expand_normal (arg0);
  1514. + rtx op1 = expand_normal (arg1);
  1515. + enum machine_mode tmode = insn_data[icode].operand[0].mode;
  1516. + enum machine_mode mode0 = insn_data[icode].operand[1].mode;
  1517. + enum machine_mode mode1 = insn_data[icode].operand[2].mode;
  1518. +
  1519. + if (!target
  1520. + || GET_MODE (target) != tmode
  1521. + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
  1522. + target = gen_reg_rtx (tmode);
  1523. +
  1524. + /* In case the insn wants input operands in modes different from the
  1525. + result, abort. */
  1526. + if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
  1527. + {
  1528. + /* If op0 is already a reg we must cast it to the correct mode. */
  1529. + if (REG_P (op0))
  1530. + op0 = convert_to_mode (mode0, op0, 1);
  1531. + else
  1532. + op0 = copy_to_mode_reg (mode0, op0);
  1533. + }
  1534. + if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
  1535. + {
  1536. + /* If op1 is already a reg we must cast it to the correct mode. */
  1537. + if (REG_P (op1))
  1538. + op1 = convert_to_mode (mode1, op1, 1);
  1539. + else
  1540. + op1 = copy_to_mode_reg (mode1, op1);
  1541. + }
  1542. + pat = GEN_FCN (icode) (target, op0, op1);
  1543. + if (!pat)
  1544. + return 0;
  1545. + emit_insn (pat);
  1546. + return target;
  1547. +}
  1548. +
  1549. +
  1550. +/* Expand an expression EXP that calls a built-in function,
  1551. + with result going to TARGET if that's convenient
  1552. + (and in mode MODE if that's convenient).
  1553. + SUBTARGET may be used as the target for computing one of EXP's operands.
  1554. + IGNORE is nonzero if the value is to be ignored. */
  1555. +rtx
  1556. +avr32_expand_builtin (tree exp,
  1557. + rtx target,
  1558. + rtx subtarget ATTRIBUTE_UNUSED,
  1559. + enum machine_mode mode ATTRIBUTE_UNUSED,
  1560. + int ignore ATTRIBUTE_UNUSED)
  1561. +{
  1562. + const struct builtin_description *d;
  1563. + unsigned int i;
  1564. + enum insn_code icode = 0;
  1565. + tree fndecl = TREE_OPERAND (CALL_EXPR_FN (exp), 0);
  1566. + tree arg0, arg1, arg2;
  1567. + rtx op0, op1, op2, pat;
  1568. + enum machine_mode tmode, mode0, mode1;
  1569. + enum machine_mode arg0_mode;
  1570. + int fcode = DECL_FUNCTION_CODE (fndecl);
  1571. +
  1572. + switch (fcode)
  1573. + {
  1574. + default:
  1575. + break;
  1576. +
  1577. + case AVR32_BUILTIN_SATS:
  1578. + case AVR32_BUILTIN_SATU:
  1579. + case AVR32_BUILTIN_SATRNDS:
  1580. + case AVR32_BUILTIN_SATRNDU:
  1581. + {
  1582. + const char *fname;
  1583. + switch (fcode)
  1584. + {
  1585. + default:
  1586. + case AVR32_BUILTIN_SATS:
  1587. + icode = CODE_FOR_sats;
  1588. + fname = "sats";
  1589. + break;
  1590. + case AVR32_BUILTIN_SATU:
  1591. + icode = CODE_FOR_satu;
  1592. + fname = "satu";
  1593. + break;
  1594. + case AVR32_BUILTIN_SATRNDS:
  1595. + icode = CODE_FOR_satrnds;
  1596. + fname = "satrnds";
  1597. + break;
  1598. + case AVR32_BUILTIN_SATRNDU:
  1599. + icode = CODE_FOR_satrndu;
  1600. + fname = "satrndu";
  1601. + break;
  1602. + }
  1603. +
  1604. + arg0 = CALL_EXPR_ARG (exp,0);
  1605. + arg1 = CALL_EXPR_ARG (exp,1);
  1606. + arg2 = CALL_EXPR_ARG (exp,2);
  1607. + op0 = expand_normal (arg0);
  1608. + op1 = expand_normal (arg1);
  1609. + op2 = expand_normal (arg2);
  1610. +
  1611. + tmode = insn_data[icode].operand[0].mode;
  1612. +
  1613. +
  1614. + if (target == 0
  1615. + || GET_MODE (target) != tmode
  1616. + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
  1617. + target = gen_reg_rtx (tmode);
  1618. +
  1619. +
  1620. + if (!(*insn_data[icode].operand[0].predicate) (op0, GET_MODE (op0)))
  1621. + {
  1622. + op0 = copy_to_mode_reg (insn_data[icode].operand[0].mode, op0);
  1623. + }
  1624. +
  1625. + if (!(*insn_data[icode].operand[1].predicate) (op1, SImode))
  1626. + {
  1627. + error ("Parameter 2 to __builtin_%s should be a constant number.",
  1628. + fname);
  1629. + return NULL_RTX;
  1630. + }
  1631. +
  1632. + if (!(*insn_data[icode].operand[1].predicate) (op2, SImode))
  1633. + {
  1634. + error ("Parameter 3 to __builtin_%s should be a constant number.",
  1635. + fname);
  1636. + return NULL_RTX;
  1637. + }
  1638. +
  1639. + emit_move_insn (target, op0);
  1640. + pat = GEN_FCN (icode) (target, op1, op2);
  1641. + if (!pat)
  1642. + return 0;
  1643. + emit_insn (pat);
  1644. +
  1645. + return target;
  1646. + }
  1647. + case AVR32_BUILTIN_MUSTR:
  1648. + icode = CODE_FOR_mustr;
  1649. + tmode = insn_data[icode].operand[0].mode;
  1650. +
  1651. + if (target == 0
  1652. + || GET_MODE (target) != tmode
  1653. + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
  1654. + target = gen_reg_rtx (tmode);
  1655. + pat = GEN_FCN (icode) (target);
  1656. + if (!pat)
  1657. + return 0;
  1658. + emit_insn (pat);
  1659. + return target;
  1660. +
  1661. + case AVR32_BUILTIN_MFSR:
  1662. + icode = CODE_FOR_mfsr;
  1663. + arg0 = CALL_EXPR_ARG (exp,0);
  1664. + op0 = expand_normal (arg0);
  1665. + tmode = insn_data[icode].operand[0].mode;
  1666. + mode0 = insn_data[icode].operand[1].mode;
  1667. +
  1668. + if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
  1669. + {
  1670. + error ("Parameter 1 to __builtin_mfsr must be a constant number");
  1671. + }
  1672. +
  1673. + if (target == 0
  1674. + || GET_MODE (target) != tmode
  1675. + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
  1676. + target = gen_reg_rtx (tmode);
  1677. + pat = GEN_FCN (icode) (target, op0);
  1678. + if (!pat)
  1679. + return 0;
  1680. + emit_insn (pat);
  1681. + return target;
  1682. + case AVR32_BUILTIN_MTSR:
  1683. + icode = CODE_FOR_mtsr;
  1684. + arg0 = CALL_EXPR_ARG (exp,0);
  1685. + arg1 = CALL_EXPR_ARG (exp,1);
  1686. + op0 = expand_normal (arg0);
  1687. + op1 = expand_normal (arg1);
  1688. + mode0 = insn_data[icode].operand[0].mode;
  1689. + mode1 = insn_data[icode].operand[1].mode;
  1690. +
  1691. + if (!(*insn_data[icode].operand[0].predicate) (op0, mode0))
  1692. + {
  1693. + error ("Parameter 1 to __builtin_mtsr must be a constant number");
  1694. + return gen_reg_rtx (mode0);
  1695. + }
  1696. + if (!(*insn_data[icode].operand[1].predicate) (op1, mode1))
  1697. + op1 = copy_to_mode_reg (mode1, op1);
  1698. + pat = GEN_FCN (icode) (op0, op1);
  1699. + if (!pat)
  1700. + return 0;
  1701. + emit_insn (pat);
  1702. + return NULL_RTX;
  1703. + case AVR32_BUILTIN_MFDR:
  1704. + icode = CODE_FOR_mfdr;
  1705. + arg0 = CALL_EXPR_ARG (exp,0);
  1706. + op0 = expand_normal (arg0);
  1707. + tmode = insn_data[icode].operand[0].mode;
  1708. + mode0 = insn_data[icode].operand[1].mode;
  1709. +
  1710. + if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
  1711. + {
  1712. + error ("Parameter 1 to __builtin_mfdr must be a constant number");
  1713. + }
  1714. +
  1715. + if (target == 0
  1716. + || GET_MODE (target) != tmode
  1717. + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
  1718. + target = gen_reg_rtx (tmode);
  1719. + pat = GEN_FCN (icode) (target, op0);
  1720. + if (!pat)
  1721. + return 0;
  1722. + emit_insn (pat);
  1723. + return target;
  1724. + case AVR32_BUILTIN_MTDR:
  1725. + icode = CODE_FOR_mtdr;
  1726. + arg0 = CALL_EXPR_ARG (exp,0);
  1727. + arg1 = CALL_EXPR_ARG (exp,1);
  1728. + op0 = expand_normal (arg0);
  1729. + op1 = expand_normal (arg1);
  1730. + mode0 = insn_data[icode].operand[0].mode;
  1731. + mode1 = insn_data[icode].operand[1].mode;
  1732. +
  1733. + if (!(*insn_data[icode].operand[0].predicate) (op0, mode0))
  1734. + {
  1735. + error ("Parameter 1 to __builtin_mtdr must be a constant number");
  1736. + return gen_reg_rtx (mode0);
  1737. + }
  1738. + if (!(*insn_data[icode].operand[1].predicate) (op1, mode1))
  1739. + op1 = copy_to_mode_reg (mode1, op1);
  1740. + pat = GEN_FCN (icode) (op0, op1);
  1741. + if (!pat)
  1742. + return 0;
  1743. + emit_insn (pat);
  1744. + return NULL_RTX;
  1745. + case AVR32_BUILTIN_CACHE:
  1746. + icode = CODE_FOR_cache;
  1747. + arg0 = CALL_EXPR_ARG (exp,0);
  1748. + arg1 = CALL_EXPR_ARG (exp,1);
  1749. + op0 = expand_normal (arg0);
  1750. + op1 = expand_normal (arg1);
  1751. + mode0 = insn_data[icode].operand[0].mode;
  1752. + mode1 = insn_data[icode].operand[1].mode;
  1753. +
  1754. + if (!(*insn_data[icode].operand[1].predicate) (op1, mode1))
  1755. + {
  1756. + error ("Parameter 2 to __builtin_cache must be a constant number");
  1757. + return gen_reg_rtx (mode1);
  1758. + }
  1759. +
  1760. + if (!(*insn_data[icode].operand[0].predicate) (op0, mode0))
  1761. + op0 = copy_to_mode_reg (mode0, op0);
  1762. +
  1763. + pat = GEN_FCN (icode) (op0, op1);
  1764. + if (!pat)
  1765. + return 0;
  1766. + emit_insn (pat);
  1767. + return NULL_RTX;
  1768. + case AVR32_BUILTIN_SYNC:
  1769. + case AVR32_BUILTIN_MUSFR:
  1770. + case AVR32_BUILTIN_SSRF:
  1771. + case AVR32_BUILTIN_CSRF:
  1772. + {
  1773. + const char *fname;
  1774. + switch (fcode)
  1775. + {
  1776. + default:
  1777. + case AVR32_BUILTIN_SYNC:
  1778. + icode = CODE_FOR_sync;
  1779. + fname = "sync";
  1780. + break;
  1781. + case AVR32_BUILTIN_MUSFR:
  1782. + icode = CODE_FOR_musfr;
  1783. + fname = "musfr";
  1784. + break;
  1785. + case AVR32_BUILTIN_SSRF:
  1786. + icode = CODE_FOR_ssrf;
  1787. + fname = "ssrf";
  1788. + break;
  1789. + case AVR32_BUILTIN_CSRF:
  1790. + icode = CODE_FOR_csrf;
  1791. + fname = "csrf";
  1792. + break;
  1793. + }
  1794. +
  1795. + arg0 = CALL_EXPR_ARG (exp,0);
  1796. + op0 = expand_normal (arg0);
  1797. + mode0 = insn_data[icode].operand[0].mode;
  1798. +
  1799. + if (!(*insn_data[icode].operand[0].predicate) (op0, mode0))
  1800. + {
  1801. + if (icode == CODE_FOR_musfr)
  1802. + op0 = copy_to_mode_reg (mode0, op0);
  1803. + else
  1804. + {
  1805. + error ("Parameter to __builtin_%s is illegal.", fname);
  1806. + return gen_reg_rtx (mode0);
  1807. + }
  1808. + }
  1809. + pat = GEN_FCN (icode) (op0);
  1810. + if (!pat)
  1811. + return 0;
  1812. + emit_insn (pat);
  1813. + return NULL_RTX;
  1814. + }
  1815. + case AVR32_BUILTIN_TLBR:
  1816. + icode = CODE_FOR_tlbr;
  1817. + pat = GEN_FCN (icode) (NULL_RTX);
  1818. + if (!pat)
  1819. + return 0;
  1820. + emit_insn (pat);
  1821. + return NULL_RTX;
  1822. + case AVR32_BUILTIN_TLBS:
  1823. + icode = CODE_FOR_tlbs;
  1824. + pat = GEN_FCN (icode) (NULL_RTX);
  1825. + if (!pat)
  1826. + return 0;
  1827. + emit_insn (pat);
  1828. + return NULL_RTX;
  1829. + case AVR32_BUILTIN_TLBW:
  1830. + icode = CODE_FOR_tlbw;
  1831. + pat = GEN_FCN (icode) (NULL_RTX);
  1832. + if (!pat)
  1833. + return 0;
  1834. + emit_insn (pat);
  1835. + return NULL_RTX;
  1836. + case AVR32_BUILTIN_BREAKPOINT:
  1837. + icode = CODE_FOR_breakpoint;
  1838. + pat = GEN_FCN (icode) (NULL_RTX);
  1839. + if (!pat)
  1840. + return 0;
  1841. + emit_insn (pat);
  1842. + return NULL_RTX;
  1843. + case AVR32_BUILTIN_XCHG:
  1844. + icode = CODE_FOR_sync_lock_test_and_setsi;
  1845. + arg0 = CALL_EXPR_ARG (exp,0);
  1846. + arg1 = CALL_EXPR_ARG (exp,1);
  1847. + op0 = expand_normal (arg0);
  1848. + op1 = expand_normal (arg1);
  1849. + tmode = insn_data[icode].operand[0].mode;
  1850. + mode0 = insn_data[icode].operand[1].mode;
  1851. + mode1 = insn_data[icode].operand[2].mode;
  1852. +
  1853. + if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
  1854. + {
  1855. + op1 = copy_to_mode_reg (mode1, op1);
  1856. + }
  1857. +
  1858. + op0 = force_reg (GET_MODE (op0), op0);
  1859. + op0 = gen_rtx_MEM (GET_MODE (op0), op0);
  1860. + if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
  1861. + {
  1862. + error
  1863. + ("Parameter 1 to __builtin_xchg must be a pointer to an integer.");
  1864. + }
  1865. +
  1866. + if (target == 0
  1867. + || GET_MODE (target) != tmode
  1868. + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
  1869. + target = gen_reg_rtx (tmode);
  1870. + pat = GEN_FCN (icode) (target, op0, op1);
  1871. + if (!pat)
  1872. + return 0;
  1873. + emit_insn (pat);
  1874. + return target;
  1875. + case AVR32_BUILTIN_LDXI:
  1876. + icode = CODE_FOR_ldxi;
  1877. + arg0 = CALL_EXPR_ARG (exp,0);
  1878. + arg1 = CALL_EXPR_ARG (exp,1);
  1879. + arg2 = CALL_EXPR_ARG (exp,2);
  1880. + op0 = expand_normal (arg0);
  1881. + op1 = expand_normal (arg1);
  1882. + op2 = expand_normal (arg2);
  1883. + tmode = insn_data[icode].operand[0].mode;
  1884. + mode0 = insn_data[icode].operand[1].mode;
  1885. + mode1 = insn_data[icode].operand[2].mode;
  1886. +
  1887. + if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
  1888. + {
  1889. + op0 = copy_to_mode_reg (mode0, op0);
  1890. + }
  1891. +
  1892. + if (!(*insn_data[icode].operand[2].predicate) (op1, mode1))
  1893. + {
  1894. + op1 = copy_to_mode_reg (mode1, op1);
  1895. + }
  1896. +
  1897. + if (!(*insn_data[icode].operand[3].predicate) (op2, SImode))
  1898. + {
  1899. + error
  1900. + ("Parameter 3 to __builtin_ldxi must be a valid extract shift operand: (0|8|16|24)");
  1901. + return gen_reg_rtx (mode0);
  1902. + }
  1903. +
  1904. + if (target == 0
  1905. + || GET_MODE (target) != tmode
  1906. + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
  1907. + target = gen_reg_rtx (tmode);
  1908. + pat = GEN_FCN (icode) (target, op0, op1, op2);
  1909. + if (!pat)
  1910. + return 0;
  1911. + emit_insn (pat);
  1912. + return target;
  1913. + case AVR32_BUILTIN_BSWAP16:
  1914. + {
  1915. + icode = CODE_FOR_bswap_16;
  1916. + arg0 = CALL_EXPR_ARG (exp,0);
  1917. + arg0_mode = TYPE_MODE (TREE_TYPE (arg0));
  1918. + mode0 = insn_data[icode].operand[1].mode;
  1919. + if (arg0_mode != mode0)
  1920. + arg0 = build1 (NOP_EXPR,
  1921. + (*lang_hooks.types.type_for_mode) (mode0, 0), arg0);
  1922. +
  1923. + op0 = expand_expr (arg0, NULL_RTX, HImode, 0);
  1924. + tmode = insn_data[icode].operand[0].mode;
  1925. +
  1926. +
  1927. + if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
  1928. + {
  1929. + if ( CONST_INT_P (op0) )
  1930. + {
  1931. + HOST_WIDE_INT val = ( ((INTVAL (op0)&0x00ff) << 8) |
  1932. + ((INTVAL (op0)&0xff00) >> 8) );
  1933. + /* Sign extend 16-bit value to host wide int */
  1934. + val <<= (HOST_BITS_PER_WIDE_INT - 16);
  1935. + val >>= (HOST_BITS_PER_WIDE_INT - 16);
  1936. + op0 = GEN_INT(val);
  1937. + if (target == 0
  1938. + || GET_MODE (target) != tmode
  1939. + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
  1940. + target = gen_reg_rtx (tmode);
  1941. + emit_move_insn(target, op0);
  1942. + return target;
  1943. + }
  1944. + else
  1945. + op0 = copy_to_mode_reg (mode0, op0);
  1946. + }
  1947. +
  1948. + if (target == 0
  1949. + || GET_MODE (target) != tmode
  1950. + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
  1951. + {
  1952. + target = gen_reg_rtx (tmode);
  1953. + }
  1954. +
  1955. +
  1956. + pat = GEN_FCN (icode) (target, op0);
  1957. + if (!pat)
  1958. + return 0;
  1959. + emit_insn (pat);
  1960. +
  1961. + return target;
  1962. + }
  1963. + case AVR32_BUILTIN_BSWAP32:
  1964. + {
  1965. + icode = CODE_FOR_bswap_32;
  1966. + arg0 = CALL_EXPR_ARG (exp,0);
  1967. + op0 = expand_normal (arg0);
  1968. + tmode = insn_data[icode].operand[0].mode;
  1969. + mode0 = insn_data[icode].operand[1].mode;
  1970. +
  1971. + if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
  1972. + {
  1973. + if ( CONST_INT_P (op0) )
  1974. + {
  1975. + HOST_WIDE_INT val = ( ((INTVAL (op0)&0x000000ff) << 24) |
  1976. + ((INTVAL (op0)&0x0000ff00) << 8) |
  1977. + ((INTVAL (op0)&0x00ff0000) >> 8) |
  1978. + ((INTVAL (op0)&0xff000000) >> 24) );
  1979. + /* Sign extend 32-bit value to host wide int */
  1980. + val <<= (HOST_BITS_PER_WIDE_INT - 32);
  1981. + val >>= (HOST_BITS_PER_WIDE_INT - 32);
  1982. + op0 = GEN_INT(val);
  1983. + if (target == 0
  1984. + || GET_MODE (target) != tmode
  1985. + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
  1986. + target = gen_reg_rtx (tmode);
  1987. + emit_move_insn(target, op0);
  1988. + return target;
  1989. + }
  1990. + else
  1991. + op0 = copy_to_mode_reg (mode0, op0);
  1992. + }
  1993. +
  1994. + if (target == 0
  1995. + || GET_MODE (target) != tmode
  1996. + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
  1997. + target = gen_reg_rtx (tmode);
  1998. +
  1999. +
  2000. + pat = GEN_FCN (icode) (target, op0);
  2001. + if (!pat)
  2002. + return 0;
  2003. + emit_insn (pat);
  2004. +
  2005. + return target;
  2006. + }
  2007. + case AVR32_BUILTIN_MVCR_W:
  2008. + case AVR32_BUILTIN_MVCR_D:
  2009. + {
  2010. + arg0 = CALL_EXPR_ARG (exp,0);
  2011. + arg1 = CALL_EXPR_ARG (exp,1);
  2012. + op0 = expand_normal (arg0);
  2013. + op1 = expand_normal (arg1);
  2014. +
  2015. + if (fcode == AVR32_BUILTIN_MVCR_W)
  2016. + icode = CODE_FOR_mvcrsi;
  2017. + else
  2018. + icode = CODE_FOR_mvcrdi;
  2019. +
  2020. + tmode = insn_data[icode].operand[0].mode;
  2021. +
  2022. + if (target == 0
  2023. + || GET_MODE (target) != tmode
  2024. + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
  2025. + target = gen_reg_rtx (tmode);
  2026. +
  2027. + if (!(*insn_data[icode].operand[1].predicate) (op0, SImode))
  2028. + {
  2029. + error
  2030. + ("Parameter 1 to __builtin_cop is not a valid coprocessor number.");
  2031. + error ("Number should be between 0 and 7.");
  2032. + return NULL_RTX;
  2033. + }
  2034. +
  2035. + if (!(*insn_data[icode].operand[2].predicate) (op1, SImode))
  2036. + {
  2037. + error
  2038. + ("Parameter 2 to __builtin_cop is not a valid coprocessor register number.");
  2039. + error ("Number should be between 0 and 15.");
  2040. + return NULL_RTX;
  2041. + }
  2042. +
  2043. + pat = GEN_FCN (icode) (target, op0, op1);
  2044. + if (!pat)
  2045. + return 0;
  2046. + emit_insn (pat);
  2047. +
  2048. + return target;
  2049. + }
  2050. + case AVR32_BUILTIN_MACSATHH_W:
  2051. + case AVR32_BUILTIN_MACWH_D:
  2052. + case AVR32_BUILTIN_MACHH_D:
  2053. + {
  2054. + arg0 = CALL_EXPR_ARG (exp,0);
  2055. + arg1 = CALL_EXPR_ARG (exp,1);
  2056. + arg2 = CALL_EXPR_ARG (exp,2);
  2057. + op0 = expand_normal (arg0);
  2058. + op1 = expand_normal (arg1);
  2059. + op2 = expand_normal (arg2);
  2060. +
  2061. + icode = ((fcode == AVR32_BUILTIN_MACSATHH_W) ? CODE_FOR_macsathh_w :
  2062. + (fcode == AVR32_BUILTIN_MACWH_D) ? CODE_FOR_macwh_d :
  2063. + CODE_FOR_machh_d);
  2064. +
  2065. + tmode = insn_data[icode].operand[0].mode;
  2066. + mode0 = insn_data[icode].operand[1].mode;
  2067. + mode1 = insn_data[icode].operand[2].mode;
  2068. +
  2069. +
  2070. + if (!target
  2071. + || GET_MODE (target) != tmode
  2072. + || !(*insn_data[icode].operand[0].predicate) (target, tmode))
  2073. + target = gen_reg_rtx (tmode);
  2074. +
  2075. + if (!(*insn_data[icode].operand[0].predicate) (op0, tmode))
  2076. + {
  2077. + /* If op0 is already a reg we must cast it to the correct mode. */
  2078. + if (REG_P (op0))
  2079. + op0 = convert_to_mode (tmode, op0, 1);
  2080. + else
  2081. + op0 = copy_to_mode_reg (tmode, op0);
  2082. + }
  2083. +
  2084. + if (!(*insn_data[icode].operand[1].predicate) (op1, mode0))
  2085. + {
  2086. + /* If op1 is already a reg we must cast it to the correct mode. */
  2087. + if (REG_P (op1))
  2088. + op1 = convert_to_mode (mode0, op1, 1);
  2089. + else
  2090. + op1 = copy_to_mode_reg (mode0, op1);
  2091. + }
  2092. +
  2093. + if (!(*insn_data[icode].operand[2].predicate) (op2, mode1))
  2094. + {
  2095. + /* If op1 is already a reg we must cast it to the correct mode. */
  2096. + if (REG_P (op2))
  2097. + op2 = convert_to_mode (mode1, op2, 1);
  2098. + else
  2099. + op2 = copy_to_mode_reg (mode1, op2);
  2100. + }
  2101. +
  2102. + emit_move_insn (target, op0);
  2103. +
  2104. + pat = GEN_FCN (icode) (target, op1, op2);
  2105. + if (!pat)
  2106. + return 0;
  2107. + emit_insn (pat);
  2108. + return target;
  2109. + }
  2110. + case AVR32_BUILTIN_MVRC_W:
  2111. + case AVR32_BUILTIN_MVRC_D:
  2112. + {
  2113. + arg0 = CALL_EXPR_ARG (exp,0);
  2114. + arg1 = CALL_EXPR_ARG (exp,1);
  2115. + arg2 = CALL_EXPR_ARG (exp,2);
  2116. + op0 = expand_normal (arg0);
  2117. + op1 = expand_normal (arg1);
  2118. + op2 = expand_normal (arg2);
  2119. +
  2120. + if (fcode == AVR32_BUILTIN_MVRC_W)
  2121. + icode = CODE_FOR_mvrcsi;
  2122. + else
  2123. + icode = CODE_FOR_mvrcdi;
  2124. +
  2125. + if (!(*insn_data[icode].operand[0].predicate) (op0, SImode))
  2126. + {
  2127. + error ("Parameter 1 is not a valid coprocessor number.");
  2128. + error ("Number should be between 0 and 7.");
  2129. + return NULL_RTX;
  2130. + }
  2131. +
  2132. + if (!(*insn_data[icode].operand[1].predicate) (op1, SImode))
  2133. + {
  2134. + error ("Parameter 2 is not a valid coprocessor register number.");
  2135. + error ("Number should be between 0 and 15.");
  2136. + return NULL_RTX;
  2137. + }
  2138. +
  2139. + if (GET_CODE (op2) == CONST_INT
  2140. + || GET_CODE (op2) == CONST
  2141. + || GET_CODE (op2) == SYMBOL_REF || GET_CODE (op2) == LABEL_REF)
  2142. + {
  2143. + op2 = force_const_mem (insn_data[icode].operand[2].mode, op2);
  2144. + }
  2145. +
  2146. + if (!(*insn_data[icode].operand[2].predicate) (op2, GET_MODE (op2)))
  2147. + op2 = copy_to_mode_reg (insn_data[icode].operand[2].mode, op2);
  2148. +
  2149. +
  2150. + pat = GEN_FCN (icode) (op0, op1, op2);
  2151. + if (!pat)
  2152. + return 0;
  2153. + emit_insn (pat);
  2154. +
  2155. + return NULL_RTX;
  2156. + }
  2157. + case AVR32_BUILTIN_COP:
  2158. + {
  2159. + rtx op3, op4;
  2160. + tree arg3, arg4;
  2161. + icode = CODE_FOR_cop;
  2162. + arg0 = CALL_EXPR_ARG (exp,0);
  2163. + arg1 = CALL_EXPR_ARG (exp,1);
  2164. + arg2 = CALL_EXPR_ARG (exp,2);
  2165. + arg3 = CALL_EXPR_ARG (exp,3);
  2166. + arg4 = CALL_EXPR_ARG (exp,4);
  2167. + op0 = expand_normal (arg0);
  2168. + op1 = expand_normal (arg1);
  2169. + op2 = expand_normal (arg2);
  2170. + op3 = expand_normal (arg3);
  2171. + op4 = expand_normal (arg4);
  2172. +
  2173. + if (!(*insn_data[icode].operand[0].predicate) (op0, SImode))
  2174. + {
  2175. + error
  2176. + ("Parameter 1 to __builtin_cop is not a valid coprocessor number.");
  2177. + error ("Number should be between 0 and 7.");
  2178. + return NULL_RTX;
  2179. + }
  2180. +
  2181. + if (!(*insn_data[icode].operand[1].predicate) (op1, SImode))
  2182. + {
  2183. + error
  2184. + ("Parameter 2 to __builtin_cop is not a valid coprocessor register number.");
  2185. + error ("Number should be between 0 and 15.");
  2186. + return NULL_RTX;
  2187. + }
  2188. +
  2189. + if (!(*insn_data[icode].operand[2].predicate) (op2, SImode))
  2190. + {
  2191. + error
  2192. + ("Parameter 3 to __builtin_cop is not a valid coprocessor register number.");
  2193. + error ("Number should be between 0 and 15.");
  2194. + return NULL_RTX;
  2195. + }
  2196. +
  2197. + if (!(*insn_data[icode].operand[3].predicate) (op3, SImode))
  2198. + {
  2199. + error
  2200. + ("Parameter 4 to __builtin_cop is not a valid coprocessor register number.");
  2201. + error ("Number should be between 0 and 15.");
  2202. + return NULL_RTX;
  2203. + }
  2204. +
  2205. + if (!(*insn_data[icode].operand[4].predicate) (op4, SImode))
  2206. + {
  2207. + error
  2208. + ("Parameter 5 to __builtin_cop is not a valid coprocessor operation.");
  2209. + error ("Number should be between 0 and 127.");
  2210. + return NULL_RTX;
  2211. + }
  2212. +
  2213. + pat = GEN_FCN (icode) (op0, op1, op2, op3, op4);
  2214. + if (!pat)
  2215. + return 0;
  2216. + emit_insn (pat);
  2217. +
  2218. + return target;
  2219. + }
  2220. +
  2221. + case AVR32_BUILTIN_MEMS:
  2222. + case AVR32_BUILTIN_MEMC:
  2223. + case AVR32_BUILTIN_MEMT:
  2224. + {
  2225. + if (!TARGET_RMW)
  2226. + error ("Trying to use __builtin_mem(s/c/t) when target does not support RMW insns.");
  2227. +
  2228. + switch (fcode) {
  2229. + case AVR32_BUILTIN_MEMS:
  2230. + icode = CODE_FOR_iorsi3;
  2231. + break;
  2232. + case AVR32_BUILTIN_MEMC:
  2233. + icode = CODE_FOR_andsi3;
  2234. + break;
  2235. + case AVR32_BUILTIN_MEMT:
  2236. + icode = CODE_FOR_xorsi3;
  2237. + break;
  2238. + }
  2239. + arg0 = CALL_EXPR_ARG (exp,0);
  2240. + arg1 = CALL_EXPR_ARG (exp,1);
  2241. + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  2242. + if ( GET_CODE (op0) == SYMBOL_REF )
  2243. + // This symbol must be RMW addressable
  2244. + SYMBOL_REF_FLAGS (op0) |= (1 << SYMBOL_FLAG_RMW_ADDR_SHIFT);
  2245. + op0 = gen_rtx_MEM(SImode, op0);
  2246. + op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
  2247. + mode0 = insn_data[icode].operand[1].mode;
  2248. +
  2249. +
  2250. + if (!(*insn_data[icode].operand[1].predicate) (op0, mode0))
  2251. + {
  2252. + error ("Parameter 1 to __builtin_mem(s/c/t) must be a Ks15<<2 address or a rmw addressable symbol.");
  2253. + }
  2254. +
  2255. + if ( !CONST_INT_P (op1)
  2256. + || INTVAL (op1) > 31
  2257. + || INTVAL (op1) < 0 )
  2258. + error ("Parameter 2 to __builtin_mem(s/c/t) must be a constant between 0 and 31.");
  2259. +
  2260. + if ( fcode == AVR32_BUILTIN_MEMC )
  2261. + op1 = GEN_INT((~(1 << INTVAL(op1)))&0xffffffff);
  2262. + else
  2263. + op1 = GEN_INT((1 << INTVAL(op1))&0xffffffff);
  2264. + pat = GEN_FCN (icode) (op0, op0, op1);
  2265. + if (!pat)
  2266. + return 0;
  2267. + emit_insn (pat);
  2268. + return op0;
  2269. + }
  2270. +
  2271. + case AVR32_BUILTIN_SLEEP:
  2272. + {
  2273. + arg0 = CALL_EXPR_ARG (exp, 0);
  2274. + op0 = expand_normal (arg0);
  2275. + int intval = INTVAL(op0);
  2276. +
  2277. + /* Check if the argument if integer and if the value of integer
  2278. + is greater than 0. */
  2279. +
  2280. + if (!CONSTANT_P (op0))
  2281. + error ("Parameter 1 to __builtin_sleep() is not a valid integer.");
  2282. + if (intval < 0 )
  2283. + error ("Parameter 1 to __builtin_sleep() should be an integer greater than 0.");
  2284. +
  2285. + int strncmpval = strncmp (avr32_part_name,"uc3l", 4);
  2286. +
  2287. + /* Check if op0 is less than 7 for uc3l* and less than 6 for other
  2288. + devices. By this check we are avoiding if operand is less than
  2289. + 256. For more devices, add more such checks. */
  2290. +
  2291. + if ( strncmpval == 0 && intval >= 7)
  2292. + error ("Parameter 1 to __builtin_sleep() should be less than or equal to 7.");
  2293. + else if ( strncmp != 0 && intval >= 6)
  2294. + error ("Parameter 1 to __builtin_sleep() should be less than or equal to 6.");
  2295. +
  2296. + emit_insn (gen_sleep(op0));
  2297. + return target;
  2298. +
  2299. + }
  2300. + case AVR32_BUILTIN_DELAY_CYCLES:
  2301. + {
  2302. + arg0 = CALL_EXPR_ARG (exp, 0);
  2303. + op0 = expand_expr (arg0, NULL_RTX, VOIDmode, 0);
  2304. +
  2305. + if (TARGET_ARCH_AP)
  2306. + error (" __builtin_avr32_delay_cycles() not supported for \'%s\' architecture.", avr32_arch_name);
  2307. + if (!CONSTANT_P (op0))
  2308. + error ("Parameter 1 to __builtin_avr32_delay_cycles() should be an integer.");
  2309. + emit_insn (gen_delay_cycles (op0));
  2310. + return 0;
  2311. +
  2312. + }
  2313. +
  2314. + }
  2315. +
  2316. + for (i = 0, d = bdesc_2arg; i < ARRAY_SIZE (bdesc_2arg); i++, d++)
  2317. + if (d->code == fcode)
  2318. + return avr32_expand_binop_builtin (d->icode, exp, target);
  2319. +
  2320. +
  2321. + /* @@@ Should really do something sensible here. */
  2322. + return NULL_RTX;
  2323. +}
  2324. +
  2325. +
  2326. +/* Handle an "interrupt" or "isr" attribute;
  2327. + arguments as in struct attribute_spec.handler. */
  2328. +static tree
  2329. +avr32_handle_isr_attribute (tree * node, tree name, tree args,
  2330. + int flags, bool * no_add_attrs)
  2331. +{
  2332. + if (DECL_P (*node))
  2333. + {
  2334. + if (TREE_CODE (*node) != FUNCTION_DECL)
  2335. + {
  2336. + warning (OPT_Wattributes,"`%s' attribute only applies to functions",
  2337. + IDENTIFIER_POINTER (name));
  2338. + *no_add_attrs = true;
  2339. + }
  2340. + /* FIXME: the argument if any is checked for type attributes; should it
  2341. + be checked for decl ones? */
  2342. + }
  2343. + else
  2344. + {
  2345. + if (TREE_CODE (*node) == FUNCTION_TYPE
  2346. + || TREE_CODE (*node) == METHOD_TYPE)
  2347. + {
  2348. + if (avr32_isr_value (args) == AVR32_FT_UNKNOWN)
  2349. + {
  2350. + warning (OPT_Wattributes,"`%s' attribute ignored", IDENTIFIER_POINTER (name));
  2351. + *no_add_attrs = true;
  2352. + }
  2353. + }
  2354. + else if (TREE_CODE (*node) == POINTER_TYPE
  2355. + && (TREE_CODE (TREE_TYPE (*node)) == FUNCTION_TYPE
  2356. + || TREE_CODE (TREE_TYPE (*node)) == METHOD_TYPE)
  2357. + && avr32_isr_value (args) != AVR32_FT_UNKNOWN)
  2358. + {
  2359. + *node = build_variant_type_copy (*node);
  2360. + TREE_TYPE (*node) = build_type_attribute_variant
  2361. + (TREE_TYPE (*node),
  2362. + tree_cons (name, args, TYPE_ATTRIBUTES (TREE_TYPE (*node))));
  2363. + *no_add_attrs = true;
  2364. + }
  2365. + else
  2366. + {
  2367. + /* Possibly pass this attribute on from the type to a decl. */
  2368. + if (flags & ((int) ATTR_FLAG_DECL_NEXT
  2369. + | (int) ATTR_FLAG_FUNCTION_NEXT
  2370. + | (int) ATTR_FLAG_ARRAY_NEXT))
  2371. + {
  2372. + *no_add_attrs = true;
  2373. + return tree_cons (name, args, NULL_TREE);
  2374. + }
  2375. + else
  2376. + {
  2377. + warning (OPT_Wattributes,"`%s' attribute ignored", IDENTIFIER_POINTER (name));
  2378. + }
  2379. + }
  2380. + }
  2381. +
  2382. + return NULL_TREE;
  2383. +}
  2384. +
  2385. +
  2386. +/* Handle an attribute requiring a FUNCTION_DECL;
  2387. + arguments as in struct attribute_spec.handler. */
  2388. +static tree
  2389. +avr32_handle_fndecl_attribute (tree * node, tree name,
  2390. + tree args,
  2391. + int flags ATTRIBUTE_UNUSED,
  2392. + bool * no_add_attrs)
  2393. +{
  2394. + if (TREE_CODE (*node) != FUNCTION_DECL)
  2395. + {
  2396. + warning (OPT_Wattributes,"%qs attribute only applies to functions",
  2397. + IDENTIFIER_POINTER (name));
  2398. + *no_add_attrs = true;
  2399. + return NULL_TREE;
  2400. + }
  2401. +
  2402. + fndecl_attribute_args = args;
  2403. + if (args == NULL_TREE)
  2404. + return NULL_TREE;
  2405. +
  2406. + tree value = TREE_VALUE (args);
  2407. + if (TREE_CODE (value) != INTEGER_CST)
  2408. + {
  2409. + warning (OPT_Wattributes,
  2410. + "argument of %qs attribute is not an integer constant",
  2411. + IDENTIFIER_POINTER (name));
  2412. + *no_add_attrs = true;
  2413. + }
  2414. +
  2415. + return NULL_TREE;
  2416. +}
  2417. +
  2418. +
  2419. +/* Handle an acall attribute;
  2420. + arguments as in struct attribute_spec.handler. */
  2421. +
  2422. +static tree
  2423. +avr32_handle_acall_attribute (tree * node, tree name,
  2424. + tree args ATTRIBUTE_UNUSED,
  2425. + int flags ATTRIBUTE_UNUSED, bool * no_add_attrs)
  2426. +{
  2427. + if (TREE_CODE (*node) == FUNCTION_TYPE || TREE_CODE (*node) == METHOD_TYPE)
  2428. + {
  2429. + warning (OPT_Wattributes,"`%s' attribute not yet supported...",
  2430. + IDENTIFIER_POINTER (name));
  2431. + *no_add_attrs = true;
  2432. + return NULL_TREE;
  2433. + }
  2434. +
  2435. + warning (OPT_Wattributes,"`%s' attribute only applies to functions",
  2436. + IDENTIFIER_POINTER (name));
  2437. + *no_add_attrs = true;
  2438. + return NULL_TREE;
  2439. +}
  2440. +
  2441. +
  2442. +bool
  2443. +avr32_flashvault_call(tree decl)
  2444. +{
  2445. + tree attributes;
  2446. + tree fv_attribute;
  2447. + tree vector_tree;
  2448. + unsigned int vector;
  2449. +
  2450. + if (decl && TREE_CODE (decl) == FUNCTION_DECL)
  2451. + {
  2452. + attributes = DECL_ATTRIBUTES(decl);
  2453. + fv_attribute = lookup_attribute ("flashvault", attributes);
  2454. + if (fv_attribute != NULL_TREE)
  2455. + {
  2456. + /* Get attribute parameter, for the function vector number. */
  2457. + /*
  2458. + There is probably an easier, standard way to retrieve the
  2459. + attribute parameter which needs to be done here.
  2460. + */
  2461. + vector_tree = TREE_VALUE(fv_attribute);
  2462. + if (vector_tree != NULL_TREE)
  2463. + {
  2464. + vector = (unsigned int)TREE_INT_CST_LOW(TREE_VALUE(vector_tree));
  2465. + fprintf (asm_out_file,
  2466. + "\tmov\tr8, lo(%i)\t# Load vector number for sscall.\n",
  2467. + vector);
  2468. + }
  2469. +
  2470. + fprintf (asm_out_file,
  2471. + "\tsscall\t# Secure system call.\n");
  2472. +
  2473. + return true;
  2474. + }
  2475. + }
  2476. +
  2477. + return false;
  2478. +}
  2479. +
  2480. +
  2481. +static bool has_attribute_p (tree decl, const char *name)
  2482. +{
  2483. + if (decl && TREE_CODE (decl) == FUNCTION_DECL)
  2484. + {
  2485. + return (lookup_attribute (name, DECL_ATTRIBUTES(decl)) != NULL_TREE);
  2486. + }
  2487. + return NULL_TREE;
  2488. +}
  2489. +
  2490. +
  2491. +/* Return 0 if the attributes for two types are incompatible, 1 if they
  2492. + are compatible, and 2 if they are nearly compatible (which causes a
  2493. + warning to be generated). */
  2494. +static int
  2495. +avr32_comp_type_attributes (tree type1, tree type2)
  2496. +{
  2497. + bool acall1, acall2, isr1, isr2, naked1, naked2, fv1, fv2, fvimpl1, fvimpl2;
  2498. +
  2499. + /* Check for mismatch of non-default calling convention. */
  2500. + if (TREE_CODE (type1) != FUNCTION_TYPE)
  2501. + return 1;
  2502. +
  2503. + /* Check for mismatched call attributes. */
  2504. + acall1 = lookup_attribute ("acall", TYPE_ATTRIBUTES (type1)) != NULL;
  2505. + acall2 = lookup_attribute ("acall", TYPE_ATTRIBUTES (type2)) != NULL;
  2506. + naked1 = lookup_attribute ("naked", TYPE_ATTRIBUTES (type1)) != NULL;
  2507. + naked2 = lookup_attribute ("naked", TYPE_ATTRIBUTES (type2)) != NULL;
  2508. + fv1 = lookup_attribute ("flashvault", TYPE_ATTRIBUTES (type1)) != NULL;
  2509. + fv2 = lookup_attribute ("flashvault", TYPE_ATTRIBUTES (type2)) != NULL;
  2510. + fvimpl1 = lookup_attribute ("flashvault_impl", TYPE_ATTRIBUTES (type1)) != NULL;
  2511. + fvimpl2 = lookup_attribute ("flashvault_impl", TYPE_ATTRIBUTES (type2)) != NULL;
  2512. + isr1 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type1)) != NULL;
  2513. + if (!isr1)
  2514. + isr1 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type1)) != NULL;
  2515. +
  2516. + isr2 = lookup_attribute ("isr", TYPE_ATTRIBUTES (type2)) != NULL;
  2517. + if (!isr2)
  2518. + isr2 = lookup_attribute ("interrupt", TYPE_ATTRIBUTES (type2)) != NULL;
  2519. +
  2520. + if ((acall1 && isr2)
  2521. + || (acall2 && isr1)
  2522. + || (naked1 && isr2)
  2523. + || (naked2 && isr1)
  2524. + || (fv1 && isr2)
  2525. + || (fv2 && isr1)
  2526. + || (fvimpl1 && isr2)
  2527. + || (fvimpl2 && isr1)
  2528. + || (fv1 && fvimpl2)
  2529. + || (fv2 && fvimpl1)
  2530. + )
  2531. + return 0;
  2532. +
  2533. + return 1;
  2534. +}
  2535. +
  2536. +
  2537. +/* Computes the type of the current function. */
  2538. +static unsigned long
  2539. +avr32_compute_func_type (void)
  2540. +{
  2541. + unsigned long type = AVR32_FT_UNKNOWN;
  2542. + tree a;
  2543. + tree attr;
  2544. +
  2545. + if (TREE_CODE (current_function_decl) != FUNCTION_DECL)
  2546. + abort ();
  2547. +
  2548. + /* Decide if the current function is volatile. Such functions never
  2549. + return, and many memory cycles can be saved by not storing register
  2550. + values that will never be needed again. This optimization was added to
  2551. + speed up context switching in a kernel application. */
  2552. + if (optimize > 0
  2553. + && TREE_NOTHROW (current_function_decl)
  2554. + && TREE_THIS_VOLATILE (current_function_decl))
  2555. + type |= AVR32_FT_VOLATILE;
  2556. +
  2557. + if (cfun->static_chain_decl != NULL)
  2558. + type |= AVR32_FT_NESTED;
  2559. +
  2560. + attr = DECL_ATTRIBUTES (current_function_decl);
  2561. +
  2562. + a = lookup_attribute ("isr", attr);
  2563. + if (a == NULL_TREE)
  2564. + a = lookup_attribute ("interrupt", attr);
  2565. +
  2566. + if (a == NULL_TREE)
  2567. + type |= AVR32_FT_NORMAL;
  2568. + else
  2569. + type |= avr32_isr_value (TREE_VALUE (a));
  2570. +
  2571. +
  2572. + a = lookup_attribute ("acall", attr);
  2573. + if (a != NULL_TREE)
  2574. + type |= AVR32_FT_ACALL;
  2575. +
  2576. + a = lookup_attribute ("naked", attr);
  2577. + if (a != NULL_TREE)
  2578. + type |= AVR32_FT_NAKED;
  2579. +
  2580. + a = lookup_attribute ("flashvault", attr);
  2581. + if (a != NULL_TREE)
  2582. + type |= AVR32_FT_FLASHVAULT;
  2583. +
  2584. + a = lookup_attribute ("flashvault_impl", attr);
  2585. + if (a != NULL_TREE)
  2586. + type |= AVR32_FT_FLASHVAULT_IMPL;
  2587. +
  2588. + return type;
  2589. +}
  2590. +
  2591. +
  2592. +/* Returns the type of the current function. */
  2593. +static unsigned long
  2594. +avr32_current_func_type (void)
  2595. +{
  2596. + if (AVR32_FUNC_TYPE (cfun->machine->func_type) == AVR32_FT_UNKNOWN)
  2597. + cfun->machine->func_type = avr32_compute_func_type ();
  2598. +
  2599. + return cfun->machine->func_type;
  2600. +}
  2601. +
  2602. +
  2603. +/*
  2604. +This target hook should return true if we should not pass type solely
  2605. +in registers. The file expr.h defines a definition that is usually appropriate,
  2606. +refer to expr.h for additional documentation.
  2607. +*/
  2608. +bool
  2609. +avr32_must_pass_in_stack (enum machine_mode mode ATTRIBUTE_UNUSED, tree type)
  2610. +{
  2611. + if (type && AGGREGATE_TYPE_P (type)
  2612. + /* If the alignment is less than the size then pass in the struct on
  2613. + the stack. */
  2614. + && ((unsigned int) TYPE_ALIGN_UNIT (type) <
  2615. + (unsigned int) int_size_in_bytes (type))
  2616. + /* If we support unaligned word accesses then structs of size 4 and 8
  2617. + can have any alignment and still be passed in registers. */
  2618. + && !(TARGET_UNALIGNED_WORD
  2619. + && (int_size_in_bytes (type) == 4
  2620. + || int_size_in_bytes (type) == 8))
  2621. + /* Double word structs need only a word alignment. */
  2622. + && !(int_size_in_bytes (type) == 8 && TYPE_ALIGN_UNIT (type) >= 4))
  2623. + return true;
  2624. +
  2625. + if (type && AGGREGATE_TYPE_P (type)
  2626. + /* Structs of size 3,5,6,7 are always passed in registers. */
  2627. + && (int_size_in_bytes (type) == 3
  2628. + || int_size_in_bytes (type) == 5
  2629. + || int_size_in_bytes (type) == 6 || int_size_in_bytes (type) == 7))
  2630. + return true;
  2631. +
  2632. +
  2633. + return (type && TREE_ADDRESSABLE (type));
  2634. +}
  2635. +
  2636. +
  2637. +bool
  2638. +avr32_strict_argument_naming (CUMULATIVE_ARGS * ca ATTRIBUTE_UNUSED)
  2639. +{
  2640. + return true;
  2641. +}
  2642. +
  2643. +
  2644. +/*
  2645. + This target hook should return true if an argument at the position indicated
  2646. + by cum should be passed by reference. This predicate is queried after target
  2647. + independent reasons for being passed by reference, such as TREE_ADDRESSABLE (type).
  2648. +
  2649. + If the hook returns true, a copy of that argument is made in memory and a
  2650. + pointer to the argument is passed instead of the argument itself. The pointer
  2651. + is passed in whatever way is appropriate for passing a pointer to that type.
  2652. +*/
  2653. +bool
  2654. +avr32_pass_by_reference (CUMULATIVE_ARGS * cum ATTRIBUTE_UNUSED,
  2655. + enum machine_mode mode ATTRIBUTE_UNUSED,
  2656. + tree type, bool named ATTRIBUTE_UNUSED)
  2657. +{
  2658. + return (type && (TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST));
  2659. +}
  2660. +
  2661. +
  2662. +static int
  2663. +avr32_arg_partial_bytes (CUMULATIVE_ARGS * pcum ATTRIBUTE_UNUSED,
  2664. + enum machine_mode mode ATTRIBUTE_UNUSED,
  2665. + tree type ATTRIBUTE_UNUSED,
  2666. + bool named ATTRIBUTE_UNUSED)
  2667. +{
  2668. + return 0;
  2669. +}
  2670. +
  2671. +
  2672. +struct gcc_target targetm = TARGET_INITIALIZER;
  2673. +
  2674. +/*
  2675. + Table used to convert from register number in the assembler instructions and
  2676. + the register numbers used in gcc.
  2677. +*/
  2678. +const int avr32_function_arg_reglist[] = {
  2679. + INTERNAL_REGNUM (12),
  2680. + INTERNAL_REGNUM (11),
  2681. + INTERNAL_REGNUM (10),
  2682. + INTERNAL_REGNUM (9),
  2683. + INTERNAL_REGNUM (8)
  2684. +};
  2685. +
  2686. +
  2687. +rtx avr32_compare_op0 = NULL_RTX;
  2688. +rtx avr32_compare_op1 = NULL_RTX;
  2689. +rtx avr32_compare_operator = NULL_RTX;
  2690. +rtx avr32_acc_cache = NULL_RTX;
  2691. +/* type of branch to use */
  2692. +enum avr32_cmp_type avr32_branch_type;
  2693. +
  2694. +
  2695. +/*
  2696. + Returns nonzero if it is allowed to store a value of mode mode in hard
  2697. + register number regno.
  2698. +*/
  2699. +int
  2700. +avr32_hard_regno_mode_ok (int regnr, enum machine_mode mode)
  2701. +{
  2702. + switch (mode)
  2703. + {
  2704. + case DImode: /* long long */
  2705. + case DFmode: /* double */
  2706. + case SCmode: /* __complex__ float */
  2707. + case CSImode: /* __complex__ int */
  2708. + if (regnr < 4)
  2709. + { /* long long int not supported in r12, sp, lr or pc. */
  2710. + return 0;
  2711. + }
  2712. + else
  2713. + {
  2714. + /* long long int has to be allocated to an even-numbered register. */
  2715. + if (regnr % 2)
  2716. + return 0;
  2717. + else
  2718. + return 1;
  2719. + }
  2720. + case CDImode: /* __complex__ long long */
  2721. + case DCmode: /* __complex__ double */
  2722. + case TImode: /* 16 bytes */
  2723. + if (regnr < 7)
  2724. + return 0;
  2725. + else if (regnr % 2)
  2726. + return 0;
  2727. + else
  2728. + return 1;
  2729. + default:
  2730. + return 1;
  2731. + }
  2732. +}
  2733. +
  2734. +
  2735. +int
  2736. +avr32_rnd_operands (rtx add, rtx shift)
  2737. +{
  2738. + if (GET_CODE (shift) == CONST_INT &&
  2739. + GET_CODE (add) == CONST_INT && INTVAL (shift) > 0)
  2740. + {
  2741. + if ((1 << (INTVAL (shift) - 1)) == INTVAL (add))
  2742. + return TRUE;
  2743. + }
  2744. +
  2745. + return FALSE;
  2746. +}
  2747. +
  2748. +
  2749. +int
  2750. +avr32_const_ok_for_constraint_p (HOST_WIDE_INT value, char c, const char *str)
  2751. +{
  2752. + switch (c)
  2753. + {
  2754. + case 'K':
  2755. + case 'I':
  2756. + {
  2757. + HOST_WIDE_INT min_value = 0, max_value = 0;
  2758. + char size_str[3];
  2759. + int const_size;
  2760. +
  2761. + size_str[0] = str[2];
  2762. + size_str[1] = str[3];
  2763. + size_str[2] = '\0';
  2764. + const_size = atoi (size_str);
  2765. +
  2766. + if (TOUPPER (str[1]) == 'U')
  2767. + {
  2768. + min_value = 0;
  2769. + max_value = (1 << const_size) - 1;
  2770. + }
  2771. + else if (TOUPPER (str[1]) == 'S')
  2772. + {
  2773. + min_value = -(1 << (const_size - 1));
  2774. + max_value = (1 << (const_size - 1)) - 1;
  2775. + }
  2776. +
  2777. + if (c == 'I')
  2778. + {
  2779. + value = -value;
  2780. + }
  2781. +
  2782. + if (value >= min_value && value <= max_value)
  2783. + {
  2784. + return 1;
  2785. + }
  2786. + break;
  2787. + }
  2788. + case 'M':
  2789. + return avr32_mask_upper_bits_operand (GEN_INT (value), VOIDmode);
  2790. + case 'J':
  2791. + return avr32_hi16_immediate_operand (GEN_INT (value), VOIDmode);
  2792. + case 'O':
  2793. + return one_bit_set_operand (GEN_INT (value), VOIDmode);
  2794. + case 'N':
  2795. + return one_bit_cleared_operand (GEN_INT (value), VOIDmode);
  2796. + case 'L':
  2797. + /* The lower 16-bits are set. */
  2798. + return ((value & 0xffff) == 0xffff) ;
  2799. + }
  2800. +
  2801. + return 0;
  2802. +}
  2803. +
  2804. +
  2805. +/* Compute mask of registers which needs saving upon function entry. */
  2806. +static unsigned long
  2807. +avr32_compute_save_reg_mask (int push)
  2808. +{
  2809. + unsigned long func_type;
  2810. + unsigned int save_reg_mask = 0;
  2811. + unsigned int reg;
  2812. +
  2813. + func_type = avr32_current_func_type ();
  2814. +
  2815. + if (IS_INTERRUPT (func_type))
  2816. + {
  2817. + unsigned int max_reg = 12;
  2818. +
  2819. + /* Get the banking scheme for the interrupt */
  2820. + switch (func_type)
  2821. + {
  2822. + case AVR32_FT_ISR_FULL:
  2823. + max_reg = 0;
  2824. + break;
  2825. + case AVR32_FT_ISR_HALF:
  2826. + max_reg = 7;
  2827. + break;
  2828. + case AVR32_FT_ISR_NONE:
  2829. + max_reg = 12;
  2830. + break;
  2831. + }
  2832. +
  2833. + /* Interrupt functions must not corrupt any registers, even call
  2834. + clobbered ones. If this is a leaf function we can just examine the
  2835. + registers used by the RTL, but otherwise we have to assume that
  2836. + whatever function is called might clobber anything, and so we have
  2837. + to save all the call-clobbered registers as well. */
  2838. +
  2839. + /* Need not push the registers r8-r12 for AVR32A architectures, as this
  2840. + is automatically done in hardware. We also do not have any shadow
  2841. + registers. */
  2842. + if (TARGET_UARCH_AVR32A)
  2843. + {
  2844. + max_reg = 7;
  2845. + func_type = AVR32_FT_ISR_NONE;
  2846. + }
  2847. +
  2848. + /* All registers which are used and are not shadowed must be saved. */
  2849. + for (reg = 0; reg <= max_reg; reg++)
  2850. + if (df_regs_ever_live_p (INTERNAL_REGNUM (reg))
  2851. + || (!current_function_is_leaf
  2852. + && call_used_regs[INTERNAL_REGNUM (reg)]))
  2853. + save_reg_mask |= (1 << reg);
  2854. +
  2855. + /* Check LR */
  2856. + if ((df_regs_ever_live_p (LR_REGNUM)
  2857. + || !current_function_is_leaf || frame_pointer_needed)
  2858. + /* Only non-shadowed register models */
  2859. + && (func_type == AVR32_FT_ISR_NONE))
  2860. + save_reg_mask |= (1 << ASM_REGNUM (LR_REGNUM));
  2861. +
  2862. + /* Make sure that the GOT register is pushed. */
  2863. + if (max_reg >= ASM_REGNUM (PIC_OFFSET_TABLE_REGNUM)
  2864. + && crtl->uses_pic_offset_table)
  2865. + save_reg_mask |= (1 << ASM_REGNUM (PIC_OFFSET_TABLE_REGNUM));
  2866. +
  2867. + }
  2868. + else
  2869. + {
  2870. + int use_pushm = optimize_size;
  2871. +
  2872. + /* In the normal case we only need to save those registers which are
  2873. + call saved and which are used by this function. */
  2874. + for (reg = 0; reg <= 7; reg++)
  2875. + if (df_regs_ever_live_p (INTERNAL_REGNUM (reg))
  2876. + && !call_used_regs[INTERNAL_REGNUM (reg)])
  2877. + save_reg_mask |= (1 << reg);
  2878. +
  2879. + /* Make sure that the GOT register is pushed. */
  2880. + if (crtl->uses_pic_offset_table)
  2881. + save_reg_mask |= (1 << ASM_REGNUM (PIC_OFFSET_TABLE_REGNUM));
  2882. +
  2883. +
  2884. + /* If we optimize for size and do not have anonymous arguments: use
  2885. + pushm/popm always. */
  2886. + if (use_pushm)
  2887. + {
  2888. + if ((save_reg_mask & (1 << 0))
  2889. + || (save_reg_mask & (1 << 1))
  2890. + || (save_reg_mask & (1 << 2)) || (save_reg_mask & (1 << 3)))
  2891. + save_reg_mask |= 0xf;
  2892. +
  2893. + if ((save_reg_mask & (1 << 4))
  2894. + || (save_reg_mask & (1 << 5))
  2895. + || (save_reg_mask & (1 << 6)) || (save_reg_mask & (1 << 7)))
  2896. + save_reg_mask |= 0xf0;
  2897. +
  2898. + if ((save_reg_mask & (1 << 8)) || (save_reg_mask & (1 << 9)))
  2899. + save_reg_mask |= 0x300;
  2900. + }
  2901. +
  2902. +
  2903. + /* Check LR */
  2904. + if ((df_regs_ever_live_p (LR_REGNUM)
  2905. + || !current_function_is_leaf
  2906. + || (optimize_size
  2907. + && save_reg_mask
  2908. + && !crtl->calls_eh_return)
  2909. + || frame_pointer_needed)
  2910. + && !IS_FLASHVAULT (func_type))
  2911. + {
  2912. + if (push
  2913. + /* Never pop LR into PC for functions which
  2914. + calls __builtin_eh_return, since we need to
  2915. + fix the SP after the restoring of the registers
  2916. + and before returning. */
  2917. + || crtl->calls_eh_return)
  2918. + {
  2919. + /* Push/Pop LR */
  2920. + save_reg_mask |= (1 << ASM_REGNUM (LR_REGNUM));
  2921. + }
  2922. + else
  2923. + {
  2924. + /* Pop PC */
  2925. + save_reg_mask |= (1 << ASM_REGNUM (PC_REGNUM));
  2926. + }
  2927. + }
  2928. + }
  2929. +
  2930. +
  2931. + /* Save registers so the exception handler can modify them. */
  2932. + if (crtl->calls_eh_return)
  2933. + {
  2934. + unsigned int i;
  2935. +
  2936. + for (i = 0;; i++)
  2937. + {
  2938. + reg = EH_RETURN_DATA_REGNO (i);
  2939. + if (reg == INVALID_REGNUM)
  2940. + break;
  2941. + save_reg_mask |= 1 << ASM_REGNUM (reg);
  2942. + }
  2943. + }
  2944. +
  2945. + return save_reg_mask;
  2946. +}
  2947. +
  2948. +
  2949. +/* Compute total size in bytes of all saved registers. */
  2950. +static int
  2951. +avr32_get_reg_mask_size (int reg_mask)
  2952. +{
  2953. + int reg, size;
  2954. + size = 0;
  2955. +
  2956. + for (reg = 0; reg <= 15; reg++)
  2957. + if (reg_mask & (1 << reg))
  2958. + size += 4;
  2959. +
  2960. + return size;
  2961. +}
  2962. +
  2963. +
  2964. +/* Get a register from one of the registers which are saved onto the stack
  2965. + upon function entry. */
  2966. +static int
  2967. +avr32_get_saved_reg (int save_reg_mask)
  2968. +{
  2969. + unsigned int reg;
  2970. +
  2971. + /* Find the first register which is saved in the saved_reg_mask */
  2972. + for (reg = 0; reg <= 15; reg++)
  2973. + if (save_reg_mask & (1 << reg))
  2974. + return reg;
  2975. +
  2976. + return -1;
  2977. +}
  2978. +
  2979. +
  2980. +/* Return 1 if it is possible to return using a single instruction. */
  2981. +int
  2982. +avr32_use_return_insn (int iscond)
  2983. +{
  2984. + unsigned int func_type = avr32_current_func_type ();
  2985. + unsigned long saved_int_regs;
  2986. +
  2987. + /* Never use a return instruction before reload has run. */
  2988. + if (!reload_completed)
  2989. + return 0;
  2990. +
  2991. + /* Must adjust the stack for vararg functions. */
  2992. + if (crtl->args.info.uses_anonymous_args)
  2993. + return 0;
  2994. +
  2995. + /* If there is a stack adjustment. */
  2996. + if (get_frame_size ())
  2997. + return 0;
  2998. +
  2999. + saved_int_regs = avr32_compute_save_reg_mask (TRUE);
  3000. +
  3001. + /* Conditional returns can not be performed in one instruction if we need
  3002. + to restore registers from the stack */
  3003. + if (iscond && saved_int_regs)
  3004. + return 0;
  3005. +
  3006. + /* Conditional return can not be used for interrupt handlers. */
  3007. + if (iscond && IS_INTERRUPT (func_type))
  3008. + return 0;
  3009. +
  3010. + /* For interrupt handlers which need to pop registers */
  3011. + if (saved_int_regs && IS_INTERRUPT (func_type))
  3012. + return 0;
  3013. +
  3014. +
  3015. + /* If there are saved registers but the LR isn't saved, then we need two
  3016. + instructions for the return. */
  3017. + if (saved_int_regs && !(saved_int_regs & (1 << ASM_REGNUM (LR_REGNUM))))
  3018. + return 0;
  3019. +
  3020. +
  3021. + return 1;
  3022. +}
  3023. +
  3024. +
  3025. +/* Generate some function prologue info in the assembly file. */
  3026. +void
  3027. +avr32_target_asm_function_prologue (FILE * f, HOST_WIDE_INT frame_size)
  3028. +{
  3029. + unsigned long func_type = avr32_current_func_type ();
  3030. +
  3031. + if (IS_NAKED (func_type))
  3032. + fprintf (f,
  3033. + "\t# Function is naked: Prologue and epilogue provided by programmer\n");
  3034. +
  3035. + if (IS_FLASHVAULT (func_type))
  3036. + {
  3037. + fprintf(f,
  3038. + "\t.ident \"flashvault\"\n\t# Function is defined with flashvault attribute.\n");
  3039. + }
  3040. +
  3041. + if (IS_FLASHVAULT_IMPL (func_type))
  3042. + {
  3043. + fprintf(f,
  3044. + "\t.ident \"flashvault\"\n\t# Function is defined with flashvault_impl attribute.\n");
  3045. +
  3046. + /* Save information on flashvault function declaration. */
  3047. + tree fv_attribute = lookup_attribute ("flashvault_impl", DECL_ATTRIBUTES(current_function_decl));
  3048. + if (fv_attribute != NULL_TREE)
  3049. + {
  3050. + tree vector_tree = TREE_VALUE(fv_attribute);
  3051. + if (vector_tree != NULL_TREE)
  3052. + {
  3053. + unsigned int vector_num;
  3054. + const char * name;
  3055. +
  3056. + vector_num = (unsigned int) TREE_INT_CST_LOW (TREE_VALUE (vector_tree));
  3057. +
  3058. + name = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
  3059. +
  3060. + flashvault_decl_list_add (vector_num, name);
  3061. + }
  3062. + }
  3063. + }
  3064. +
  3065. + if (IS_INTERRUPT (func_type))
  3066. + {
  3067. + switch (func_type)
  3068. + {
  3069. + case AVR32_FT_ISR_FULL:
  3070. + fprintf (f,
  3071. + "\t# Interrupt Function: Fully shadowed register file\n");
  3072. + break;
  3073. + case AVR32_FT_ISR_HALF:
  3074. + fprintf (f,
  3075. + "\t# Interrupt Function: Half shadowed register file\n");
  3076. + break;
  3077. + default:
  3078. + case AVR32_FT_ISR_NONE:
  3079. + fprintf (f, "\t# Interrupt Function: No shadowed register file\n");
  3080. + break;
  3081. + }
  3082. + }
  3083. +
  3084. +
  3085. + fprintf (f, "\t# args = %i, frame = %li, pretend = %i\n",
  3086. + crtl->args.size, frame_size,
  3087. + crtl->args.pretend_args_size);
  3088. +
  3089. + fprintf (f, "\t# frame_needed = %i, leaf_function = %i\n",
  3090. + frame_pointer_needed, current_function_is_leaf);
  3091. +
  3092. + fprintf (f, "\t# uses_anonymous_args = %i\n",
  3093. + crtl->args.info.uses_anonymous_args);
  3094. +
  3095. + if (crtl->calls_eh_return)
  3096. + fprintf (f, "\t# Calls __builtin_eh_return.\n");
  3097. +
  3098. +}
  3099. +
  3100. +
  3101. +/* Generate and emit an insn that we will recognize as a pushm or stm.
  3102. + Unfortunately, since this insn does not reflect very well the actual
  3103. + semantics of the operation, we need to annotate the insn for the benefit
  3104. + of DWARF2 frame unwind information. */
  3105. +
  3106. +int avr32_convert_to_reglist16 (int reglist8_vect);
  3107. +
  3108. +static rtx
  3109. +emit_multi_reg_push (int reglist, int usePUSHM)
  3110. +{
  3111. + rtx insn;
  3112. + rtx dwarf;
  3113. + rtx tmp;
  3114. + rtx reg;
  3115. + int i;
  3116. + int nr_regs;
  3117. + int index = 0;
  3118. +
  3119. + if (usePUSHM)
  3120. + {
  3121. + insn = emit_insn (gen_pushm (gen_rtx_CONST_INT (SImode, reglist)));
  3122. + reglist = avr32_convert_to_reglist16 (reglist);
  3123. + }
  3124. + else
  3125. + {
  3126. + insn = emit_insn (gen_stm (stack_pointer_rtx,
  3127. + gen_rtx_CONST_INT (SImode, reglist),
  3128. + gen_rtx_CONST_INT (SImode, 1)));
  3129. + }
  3130. +
  3131. + nr_regs = avr32_get_reg_mask_size (reglist) / 4;
  3132. + dwarf = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (nr_regs + 1));
  3133. +
  3134. + for (i = 15; i >= 0; i--)
  3135. + {
  3136. + if (reglist & (1 << i))
  3137. + {
  3138. + reg = gen_rtx_REG (SImode, INTERNAL_REGNUM (i));
  3139. + tmp = gen_rtx_SET (VOIDmode,
  3140. + gen_rtx_MEM (SImode,
  3141. + plus_constant (stack_pointer_rtx,
  3142. + 4 * index)), reg);
  3143. + RTX_FRAME_RELATED_P (tmp) = 1;
  3144. + XVECEXP (dwarf, 0, 1 + index++) = tmp;
  3145. + }
  3146. + }
  3147. +
  3148. + tmp = gen_rtx_SET (SImode,
  3149. + stack_pointer_rtx,
  3150. + gen_rtx_PLUS (SImode,
  3151. + stack_pointer_rtx,
  3152. + GEN_INT (-4 * nr_regs)));
  3153. + RTX_FRAME_RELATED_P (tmp) = 1;
  3154. + XVECEXP (dwarf, 0, 0) = tmp;
  3155. + REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, dwarf,
  3156. + REG_NOTES (insn));
  3157. + return insn;
  3158. +}
  3159. +
  3160. +rtx
  3161. +avr32_gen_load_multiple (rtx * regs, int count, rtx from,
  3162. + int write_back, int in_struct_p, int scalar_p)
  3163. +{
  3164. +
  3165. + rtx result;
  3166. + int i = 0, j;
  3167. +
  3168. + result =
  3169. + gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count + (write_back ? 1 : 0)));
  3170. +
  3171. + if (write_back)
  3172. + {
  3173. + XVECEXP (result, 0, 0)
  3174. + = gen_rtx_SET (GET_MODE (from), from,
  3175. + plus_constant (from, count * 4));
  3176. + i = 1;
  3177. + count++;
  3178. + }
  3179. +
  3180. +
  3181. + for (j = 0; i < count; i++, j++)
  3182. + {
  3183. + rtx unspec;
  3184. + rtx mem = gen_rtx_MEM (SImode, plus_constant (from, j * 4));
  3185. + MEM_IN_STRUCT_P (mem) = in_struct_p;
  3186. + MEM_SCALAR_P (mem) = scalar_p;
  3187. + unspec = gen_rtx_UNSPEC (VOIDmode, gen_rtvec (1, mem), UNSPEC_LDM);
  3188. + XVECEXP (result, 0, i) = gen_rtx_SET (VOIDmode, regs[j], unspec);
  3189. + }
  3190. +
  3191. + return result;
  3192. +}
  3193. +
  3194. +
  3195. +rtx
  3196. +avr32_gen_store_multiple (rtx * regs, int count, rtx to,
  3197. + int in_struct_p, int scalar_p)
  3198. +{
  3199. + rtx result;
  3200. + int i = 0, j;
  3201. +
  3202. + result = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (count));
  3203. +
  3204. + for (j = 0; i < count; i++, j++)
  3205. + {
  3206. + rtx mem = gen_rtx_MEM (SImode, plus_constant (to, j * 4));
  3207. + MEM_IN_STRUCT_P (mem) = in_struct_p;
  3208. + MEM_SCALAR_P (mem) = scalar_p;
  3209. + XVECEXP (result, 0, i)
  3210. + = gen_rtx_SET (VOIDmode, mem,
  3211. + gen_rtx_UNSPEC (VOIDmode,
  3212. + gen_rtvec (1, regs[j]),
  3213. + UNSPEC_STORE_MULTIPLE));
  3214. + }
  3215. +
  3216. + return result;
  3217. +}
  3218. +
  3219. +
  3220. +/* Move a block of memory if it is word aligned or we support unaligned
  3221. + word memory accesses. The size must be maximum 64 bytes. */
  3222. +int
  3223. +avr32_gen_movmemsi (rtx * operands)
  3224. +{
  3225. + HOST_WIDE_INT bytes_to_go;
  3226. + rtx src, dst;
  3227. + rtx st_src, st_dst;
  3228. + int src_offset = 0, dst_offset = 0;
  3229. + int block_size;
  3230. + int dst_in_struct_p, src_in_struct_p;
  3231. + int dst_scalar_p, src_scalar_p;
  3232. + int unaligned;
  3233. +
  3234. + if (GET_CODE (operands[2]) != CONST_INT
  3235. + || GET_CODE (operands[3]) != CONST_INT
  3236. + || INTVAL (operands[2]) > 64
  3237. + || ((INTVAL (operands[3]) & 3) && !TARGET_UNALIGNED_WORD))
  3238. + return 0;
  3239. +
  3240. + unaligned = (INTVAL (operands[3]) & 3) != 0;
  3241. +
  3242. + block_size = 4;
  3243. +
  3244. + st_dst = XEXP (operands[0], 0);
  3245. + st_src = XEXP (operands[1], 0);
  3246. +
  3247. + dst_in_struct_p = MEM_IN_STRUCT_P (operands[0]);
  3248. + dst_scalar_p = MEM_SCALAR_P (operands[0]);
  3249. + src_in_struct_p = MEM_IN_STRUCT_P (operands[1]);
  3250. + src_scalar_p = MEM_SCALAR_P (operands[1]);
  3251. +
  3252. + dst = copy_to_mode_reg (SImode, st_dst);
  3253. + src = copy_to_mode_reg (SImode, st_src);
  3254. +
  3255. + bytes_to_go = INTVAL (operands[2]);
  3256. +
  3257. + while (bytes_to_go)
  3258. + {
  3259. + enum machine_mode move_mode;
  3260. + /* (Seems to be a problem with reloads for the movti pattern so this is
  3261. + disabled until that problem is resolved)
  3262. + UPDATE: Problem seems to be solved now.... */
  3263. + if (bytes_to_go >= GET_MODE_SIZE (TImode) && !unaligned
  3264. + /* Do not emit ldm/stm for UC3 as ld.d/st.d is more optimal. */
  3265. + && !TARGET_ARCH_UC)
  3266. + move_mode = TImode;
  3267. + else if ((bytes_to_go >= GET_MODE_SIZE (DImode)) && !unaligned)
  3268. + move_mode = DImode;
  3269. + else if (bytes_to_go >= GET_MODE_SIZE (SImode))
  3270. + move_mode = SImode;
  3271. + else
  3272. + move_mode = QImode;
  3273. +
  3274. + {
  3275. + rtx src_mem;
  3276. + rtx dst_mem = gen_rtx_MEM (move_mode,
  3277. + gen_rtx_PLUS (SImode, dst,
  3278. + GEN_INT (dst_offset)));
  3279. + dst_offset += GET_MODE_SIZE (move_mode);
  3280. + if ( 0 /* This causes an error in GCC. Think there is
  3281. + something wrong in the gcse pass which causes REG_EQUIV notes
  3282. + to be wrong so disabling it for now. */
  3283. + && move_mode == TImode
  3284. + && INTVAL (operands[2]) > GET_MODE_SIZE (TImode) )
  3285. + {
  3286. + src_mem = gen_rtx_MEM (move_mode,
  3287. + gen_rtx_POST_INC (SImode, src));
  3288. + }
  3289. + else
  3290. + {
  3291. + src_mem = gen_rtx_MEM (move_mode,
  3292. + gen_rtx_PLUS (SImode, src,
  3293. + GEN_INT (src_offset)));
  3294. + src_offset += GET_MODE_SIZE (move_mode);
  3295. + }
  3296. +
  3297. + bytes_to_go -= GET_MODE_SIZE (move_mode);
  3298. +
  3299. + MEM_IN_STRUCT_P (dst_mem) = dst_in_struct_p;
  3300. + MEM_SCALAR_P (dst_mem) = dst_scalar_p;
  3301. +
  3302. + MEM_IN_STRUCT_P (src_mem) = src_in_struct_p;
  3303. + MEM_SCALAR_P (src_mem) = src_scalar_p;
  3304. + emit_move_insn (dst_mem, src_mem);
  3305. +
  3306. + }
  3307. + }
  3308. +
  3309. + return 1;
  3310. +}
  3311. +
  3312. +
  3313. +/* Expand the prologue instruction. */
  3314. +void
  3315. +avr32_expand_prologue (void)
  3316. +{
  3317. + rtx insn, dwarf;
  3318. + unsigned long saved_reg_mask;
  3319. + int reglist8 = 0;
  3320. +
  3321. + /* Naked functions do not have a prologue. */
  3322. + if (IS_NAKED (avr32_current_func_type ()))
  3323. + return;
  3324. +
  3325. + saved_reg_mask = avr32_compute_save_reg_mask (TRUE);
  3326. +
  3327. + if (saved_reg_mask)
  3328. + {
  3329. + /* Must push used registers. */
  3330. +
  3331. + /* Should we use PUSHM or STM? */
  3332. + int usePUSHM = TRUE;
  3333. + reglist8 = 0;
  3334. + if (((saved_reg_mask & (1 << 0)) ||
  3335. + (saved_reg_mask & (1 << 1)) ||
  3336. + (saved_reg_mask & (1 << 2)) || (saved_reg_mask & (1 << 3))))
  3337. + {
  3338. + /* One of R0-R3 should at least be pushed. */
  3339. + if (((saved_reg_mask & (1 << 0)) &&
  3340. + (saved_reg_mask & (1 << 1)) &&
  3341. + (saved_reg_mask & (1 << 2)) && (saved_reg_mask & (1 << 3))))
  3342. + {
  3343. + /* All should be pushed. */
  3344. + reglist8 |= 0x01;
  3345. + }
  3346. + else
  3347. + {
  3348. + usePUSHM = FALSE;
  3349. + }
  3350. + }
  3351. +
  3352. + if (((saved_reg_mask & (1 << 4)) ||
  3353. + (saved_reg_mask & (1 << 5)) ||
  3354. + (saved_reg_mask & (1 << 6)) || (saved_reg_mask & (1 << 7))))
  3355. + {
  3356. + /* One of R4-R7 should at least be pushed */
  3357. + if (((saved_reg_mask & (1 << 4)) &&
  3358. + (saved_reg_mask & (1 << 5)) &&
  3359. + (saved_reg_mask & (1 << 6)) && (saved_reg_mask & (1 << 7))))
  3360. + {
  3361. + if (usePUSHM)
  3362. + /* All should be pushed */
  3363. + reglist8 |= 0x02;
  3364. + }
  3365. + else
  3366. + {
  3367. + usePUSHM = FALSE;
  3368. + }
  3369. + }
  3370. +
  3371. + if (((saved_reg_mask & (1 << 8)) || (saved_reg_mask & (1 << 9))))
  3372. + {
  3373. + /* One of R8-R9 should at least be pushed. */
  3374. + if (((saved_reg_mask & (1 << 8)) && (saved_reg_mask & (1 << 9))))
  3375. + {
  3376. + if (usePUSHM)
  3377. + /* All should be pushed. */
  3378. + reglist8 |= 0x04;
  3379. + }
  3380. + else
  3381. + {
  3382. + usePUSHM = FALSE;
  3383. + }
  3384. + }
  3385. +
  3386. + if (saved_reg_mask & (1 << 10))
  3387. + reglist8 |= 0x08;
  3388. +
  3389. + if (saved_reg_mask & (1 << 11))
  3390. + reglist8 |= 0x10;
  3391. +
  3392. + if (saved_reg_mask & (1 << 12))
  3393. + reglist8 |= 0x20;
  3394. +
  3395. + if ((saved_reg_mask & (1 << ASM_REGNUM (LR_REGNUM)))
  3396. + && !IS_FLASHVAULT (avr32_current_func_type ()))
  3397. + {
  3398. + /* Push LR */
  3399. + reglist8 |= 0x40;
  3400. + }
  3401. +
  3402. + if (usePUSHM)
  3403. + {
  3404. + insn = emit_multi_reg_push (reglist8, TRUE);
  3405. + }
  3406. + else
  3407. + {
  3408. + insn = emit_multi_reg_push (saved_reg_mask, FALSE);
  3409. + }
  3410. + RTX_FRAME_RELATED_P (insn) = 1;
  3411. +
  3412. + /* Prevent this instruction from being scheduled after any other
  3413. + instructions. */
  3414. + emit_insn (gen_blockage ());
  3415. + }
  3416. +
  3417. + /* Set frame pointer */
  3418. + if (frame_pointer_needed)
  3419. + {
  3420. + insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
  3421. + RTX_FRAME_RELATED_P (insn) = 1;
  3422. + }
  3423. +
  3424. + if (get_frame_size () > 0)
  3425. + {
  3426. + if (avr32_const_ok_for_constraint_p (get_frame_size (), 'K', "Ks21"))
  3427. + {
  3428. + insn = emit_insn (gen_rtx_SET (SImode,
  3429. + stack_pointer_rtx,
  3430. + gen_rtx_PLUS (SImode,
  3431. + stack_pointer_rtx,
  3432. + gen_rtx_CONST_INT
  3433. + (SImode,
  3434. + -get_frame_size
  3435. + ()))));
  3436. + RTX_FRAME_RELATED_P (insn) = 1;
  3437. + }
  3438. + else
  3439. + {
  3440. + /* Immediate is larger than Ks21. We must either check if we can use
  3441. + one of the pushed registers as temporary storage or we must
  3442. + create a temp register by pushing a register to the stack. */
  3443. + rtx temp_reg, const_pool_entry, insn;
  3444. + if (saved_reg_mask)
  3445. + {
  3446. + temp_reg =
  3447. + gen_rtx_REG (SImode,
  3448. + INTERNAL_REGNUM (avr32_get_saved_reg
  3449. + (saved_reg_mask)));
  3450. + }
  3451. + else
  3452. + {
  3453. + temp_reg = gen_rtx_REG (SImode, INTERNAL_REGNUM (7));
  3454. + emit_move_insn (gen_rtx_MEM
  3455. + (SImode,
  3456. + gen_rtx_PRE_DEC (SImode, stack_pointer_rtx)),
  3457. + temp_reg);
  3458. + }
  3459. +
  3460. + const_pool_entry =
  3461. + force_const_mem (SImode,
  3462. + gen_rtx_CONST_INT (SImode, get_frame_size ()));
  3463. + emit_move_insn (temp_reg, const_pool_entry);
  3464. +
  3465. + insn = emit_insn (gen_rtx_SET (SImode,
  3466. + stack_pointer_rtx,
  3467. + gen_rtx_MINUS (SImode,
  3468. + stack_pointer_rtx,
  3469. + temp_reg)));
  3470. +
  3471. + dwarf = gen_rtx_SET (VOIDmode, stack_pointer_rtx,
  3472. + gen_rtx_PLUS (SImode, stack_pointer_rtx,
  3473. + GEN_INT (-get_frame_size ())));
  3474. + REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
  3475. + dwarf, REG_NOTES (insn));
  3476. + RTX_FRAME_RELATED_P (insn) = 1;
  3477. +
  3478. + if (!saved_reg_mask)
  3479. + {
  3480. + insn =
  3481. + emit_move_insn (temp_reg,
  3482. + gen_rtx_MEM (SImode,
  3483. + gen_rtx_POST_INC (SImode,
  3484. + gen_rtx_REG
  3485. + (SImode,
  3486. + 13))));
  3487. + }
  3488. +
  3489. + /* Mark the temp register as dead */
  3490. + REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_DEAD, temp_reg,
  3491. + REG_NOTES (insn));
  3492. +
  3493. +
  3494. + }
  3495. +
  3496. + /* Prevent the stack adjustment from being scheduled after any
  3497. + instructions using the frame pointer. */
  3498. + emit_insn (gen_blockage ());
  3499. + }
  3500. +
  3501. + /* Load GOT */
  3502. + if (flag_pic)
  3503. + {
  3504. + avr32_load_pic_register ();
  3505. +
  3506. + /* gcc does not know that load or call instructions might use the pic
  3507. + register so it might schedule these instructions before the loading
  3508. + of the pic register. To avoid this emit a barrier for now. TODO!
  3509. + Find out a better way to let gcc know which instructions might use
  3510. + the pic register. */
  3511. + emit_insn (gen_blockage ());
  3512. + }
  3513. + return;
  3514. +}
  3515. +
  3516. +
  3517. +void
  3518. +avr32_set_return_address (rtx source, rtx scratch)
  3519. +{
  3520. + rtx addr;
  3521. + unsigned long saved_regs;
  3522. +
  3523. + saved_regs = avr32_compute_save_reg_mask (TRUE);
  3524. +
  3525. + if (!(saved_regs & (1 << ASM_REGNUM (LR_REGNUM))))
  3526. + emit_move_insn (gen_rtx_REG (Pmode, LR_REGNUM), source);
  3527. + else
  3528. + {
  3529. + if (frame_pointer_needed)
  3530. + addr = gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM);
  3531. + else
  3532. + if (avr32_const_ok_for_constraint_p (get_frame_size (), 'K', "Ks16"))
  3533. + {
  3534. + addr = plus_constant (stack_pointer_rtx, get_frame_size ());
  3535. + }
  3536. + else
  3537. + {
  3538. + emit_insn (gen_movsi (scratch, GEN_INT (get_frame_size ())));
  3539. + addr = scratch;
  3540. + }
  3541. + emit_move_insn (gen_rtx_MEM (Pmode, addr), source);
  3542. + }
  3543. +}
  3544. +
  3545. +
  3546. +/* Return the length of INSN. LENGTH is the initial length computed by
  3547. + attributes in the machine-description file. */
  3548. +int
  3549. +avr32_adjust_insn_length (rtx insn ATTRIBUTE_UNUSED,
  3550. + int length ATTRIBUTE_UNUSED)
  3551. +{
  3552. + return length;
  3553. +}
  3554. +
  3555. +
  3556. +void
  3557. +avr32_output_return_instruction (int single_ret_inst ATTRIBUTE_UNUSED,
  3558. + int iscond ATTRIBUTE_UNUSED,
  3559. + rtx cond ATTRIBUTE_UNUSED, rtx r12_imm)
  3560. +{
  3561. +
  3562. + unsigned long saved_reg_mask;
  3563. + int insert_ret = TRUE;
  3564. + int reglist8 = 0;
  3565. + int stack_adjustment = get_frame_size ();
  3566. + unsigned int func_type = avr32_current_func_type ();
  3567. + FILE *f = asm_out_file;
  3568. +
  3569. + /* Naked functions do not have an epilogue */
  3570. + if (IS_NAKED (func_type))
  3571. + return;
  3572. +
  3573. + saved_reg_mask = avr32_compute_save_reg_mask (FALSE);
  3574. +
  3575. + /* Reset frame pointer */
  3576. + if (stack_adjustment > 0)
  3577. + {
  3578. + if (avr32_const_ok_for_constraint_p (stack_adjustment, 'I', "Is21"))
  3579. + {
  3580. + fprintf (f, "\tsub\tsp, %i # Reset Frame Pointer\n",
  3581. + -stack_adjustment);
  3582. + }
  3583. + else
  3584. + {
  3585. + /* TODO! Is it safe to use r8 as scratch?? */
  3586. + fprintf (f, "\tmov\tr8, lo(%i) # Reset Frame Pointer\n",
  3587. + -stack_adjustment);
  3588. + fprintf (f, "\torh\tr8, hi(%i) # Reset Frame Pointer\n",
  3589. + -stack_adjustment);
  3590. + fprintf (f, "\tadd\tsp, r8 # Reset Frame Pointer\n");
  3591. + }
  3592. + }
  3593. +
  3594. + if (saved_reg_mask)
  3595. + {
  3596. + /* Must pop used registers */
  3597. +
  3598. + /* Should we use POPM or LDM? */
  3599. + int usePOPM = TRUE;
  3600. + if (((saved_reg_mask & (1 << 0)) ||
  3601. + (saved_reg_mask & (1 << 1)) ||
  3602. + (saved_reg_mask & (1 << 2)) || (saved_reg_mask & (1 << 3))))
  3603. + {
  3604. + /* One of R0-R3 should at least be popped */
  3605. + if (((saved_reg_mask & (1 << 0)) &&
  3606. + (saved_reg_mask & (1 << 1)) &&
  3607. + (saved_reg_mask & (1 << 2)) && (saved_reg_mask & (1 << 3))))
  3608. + {
  3609. + /* All should be popped */
  3610. + reglist8 |= 0x01;
  3611. + }
  3612. + else
  3613. + {
  3614. + usePOPM = FALSE;
  3615. + }
  3616. + }
  3617. +
  3618. + if (((saved_reg_mask & (1 << 4)) ||
  3619. + (saved_reg_mask & (1 << 5)) ||
  3620. + (saved_reg_mask & (1 << 6)) || (saved_reg_mask & (1 << 7))))
  3621. + {
  3622. + /* One of R4-R7 should at least be popped */
  3623. + if (((saved_reg_mask & (1 << 4)) &&
  3624. + (saved_reg_mask & (1 << 5)) &&
  3625. + (saved_reg_mask & (1 << 6)) && (saved_reg_mask & (1 << 7))))
  3626. + {
  3627. + if (usePOPM)
  3628. + /* All should be popped */
  3629. + reglist8 |= 0x02;
  3630. + }
  3631. + else
  3632. + {
  3633. + usePOPM = FALSE;
  3634. + }
  3635. + }
  3636. +
  3637. + if (((saved_reg_mask & (1 << 8)) || (saved_reg_mask & (1 << 9))))
  3638. + {
  3639. + /* One of R8-R9 should at least be popped */
  3640. + if (((saved_reg_mask & (1 << 8)) && (saved_reg_mask & (1 << 9))))
  3641. + {
  3642. + if (usePOPM)
  3643. + /* All should be popped */
  3644. + reglist8 |= 0x04;
  3645. + }
  3646. + else
  3647. + {
  3648. + usePOPM = FALSE;
  3649. + }
  3650. + }
  3651. +
  3652. + if (saved_reg_mask & (1 << 10))
  3653. + reglist8 |= 0x08;
  3654. +
  3655. + if (saved_reg_mask & (1 << 11))
  3656. + reglist8 |= 0x10;
  3657. +
  3658. + if (saved_reg_mask & (1 << 12))
  3659. + reglist8 |= 0x20;
  3660. +
  3661. + if (saved_reg_mask & (1 << ASM_REGNUM (LR_REGNUM)))
  3662. + /* Pop LR */
  3663. + reglist8 |= 0x40;
  3664. +
  3665. + if ((saved_reg_mask & (1 << ASM_REGNUM (PC_REGNUM)))
  3666. + && !IS_FLASHVAULT_IMPL (func_type))
  3667. + /* Pop LR into PC. */
  3668. + reglist8 |= 0x80;
  3669. +
  3670. + if (usePOPM)
  3671. + {
  3672. + char reglist[64]; /* 64 bytes should be enough... */
  3673. + avr32_make_reglist8 (reglist8, (char *) reglist);
  3674. +
  3675. + if (reglist8 & 0x80)
  3676. + /* This instruction is also a return */
  3677. + insert_ret = FALSE;
  3678. +
  3679. + if (r12_imm && !insert_ret)
  3680. + fprintf (f, "\tpopm\t%s, r12=%li\n", reglist, INTVAL (r12_imm));
  3681. + else
  3682. + fprintf (f, "\tpopm\t%s\n", reglist);
  3683. +
  3684. + }
  3685. + else
  3686. + {
  3687. + char reglist[64]; /* 64 bytes should be enough... */
  3688. + avr32_make_reglist16 (saved_reg_mask, (char *) reglist);
  3689. + if (saved_reg_mask & (1 << ASM_REGNUM (PC_REGNUM)))
  3690. + /* This instruction is also a return */
  3691. + insert_ret = FALSE;
  3692. +
  3693. + if (r12_imm && !insert_ret)
  3694. + fprintf (f, "\tldm\tsp++, %s, r12=%li\n", reglist,
  3695. + INTVAL (r12_imm));
  3696. + else
  3697. + fprintf (f, "\tldm\tsp++, %s\n", reglist);
  3698. +
  3699. + }
  3700. +
  3701. + }
  3702. +
  3703. + /* Stack adjustment for exception handler. */
  3704. + if (crtl->calls_eh_return)
  3705. + fprintf (f, "\tadd\tsp, r%d\n", ASM_REGNUM (EH_RETURN_STACKADJ_REGNO));
  3706. +
  3707. +
  3708. + if (IS_INTERRUPT (func_type))
  3709. + {
  3710. + fprintf (f, "\trete\n");
  3711. + }
  3712. + else if (IS_FLASHVAULT (func_type))
  3713. + {
  3714. + /* Normal return from Secure System call, increment SS_RAR before
  3715. + returning. Use R8 as scratch. */
  3716. + fprintf (f,
  3717. + "\t# Normal return from sscall.\n"
  3718. + "\t# Increment SS_RAR before returning.\n"
  3719. + "\t# Use R8 as scratch.\n"
  3720. + "\tmfsr\tr8, 440\n"
  3721. + "\tsub\tr8, -2\n"
  3722. + "\tmtsr\t440, r8\n"
  3723. + "\tretss\n");
  3724. + }
  3725. + else if (insert_ret)
  3726. + {
  3727. + if (r12_imm)
  3728. + fprintf (f, "\tretal\t%li\n", INTVAL (r12_imm));
  3729. + else
  3730. + fprintf (f, "\tretal\tr12\n");
  3731. + }
  3732. +}
  3733. +
  3734. +void
  3735. +avr32_make_reglist16 (int reglist16_vect, char *reglist16_string)
  3736. +{
  3737. + int i;
  3738. + bool first_reg = true;
  3739. + /* Make sure reglist16_string is empty. */
  3740. + reglist16_string[0] = '\0';
  3741. +
  3742. + for (i = 0; i < 16; ++i)
  3743. + {
  3744. + if (reglist16_vect & (1 << i))
  3745. + {
  3746. + first_reg == true ? first_reg = false : strcat(reglist16_string,", ");
  3747. + strcat (reglist16_string, reg_names[INTERNAL_REGNUM (i)]);
  3748. + }
  3749. + }
  3750. +}
  3751. +
  3752. +int
  3753. +avr32_convert_to_reglist16 (int reglist8_vect)
  3754. +{
  3755. + int reglist16_vect = 0;
  3756. + if (reglist8_vect & 0x1)
  3757. + reglist16_vect |= 0xF;
  3758. + if (reglist8_vect & 0x2)
  3759. + reglist16_vect |= 0xF0;
  3760. + if (reglist8_vect & 0x4)
  3761. + reglist16_vect |= 0x300;
  3762. + if (reglist8_vect & 0x8)
  3763. + reglist16_vect |= 0x400;
  3764. + if (reglist8_vect & 0x10)
  3765. + reglist16_vect |= 0x800;
  3766. + if (reglist8_vect & 0x20)
  3767. + reglist16_vect |= 0x1000;
  3768. + if (reglist8_vect & 0x40)
  3769. + reglist16_vect |= 0x4000;
  3770. + if (reglist8_vect & 0x80)
  3771. + reglist16_vect |= 0x8000;
  3772. +
  3773. + return reglist16_vect;
  3774. +}
  3775. +
  3776. +void
  3777. +avr32_make_reglist8 (int reglist8_vect, char *reglist8_string)
  3778. +{
  3779. + /* Make sure reglist8_string is empty. */
  3780. + reglist8_string[0] = '\0';
  3781. +
  3782. + if (reglist8_vect & 0x1)
  3783. + strcpy (reglist8_string, "r0-r3");
  3784. + if (reglist8_vect & 0x2)
  3785. + strlen (reglist8_string) ? strcat (reglist8_string, ", r4-r7") :
  3786. + strcpy (reglist8_string, "r4-r7");
  3787. + if (reglist8_vect & 0x4)
  3788. + strlen (reglist8_string) ? strcat (reglist8_string, ", r8-r9") :
  3789. + strcpy (reglist8_string, "r8-r9");
  3790. + if (reglist8_vect & 0x8)
  3791. + strlen (reglist8_string) ? strcat (reglist8_string, ", r10") :
  3792. + strcpy (reglist8_string, "r10");
  3793. + if (reglist8_vect & 0x10)
  3794. + strlen (reglist8_string) ? strcat (reglist8_string, ", r11") :
  3795. + strcpy (reglist8_string, "r11");
  3796. + if (reglist8_vect & 0x20)
  3797. + strlen (reglist8_string) ? strcat (reglist8_string, ", r12") :
  3798. + strcpy (reglist8_string, "r12");
  3799. + if (reglist8_vect & 0x40)
  3800. + strlen (reglist8_string) ? strcat (reglist8_string, ", lr") :
  3801. + strcpy (reglist8_string, "lr");
  3802. + if (reglist8_vect & 0x80)
  3803. + strlen (reglist8_string) ? strcat (reglist8_string, ", pc") :
  3804. + strcpy (reglist8_string, "pc");
  3805. +}
  3806. +
  3807. +
  3808. +int
  3809. +avr32_eh_return_data_regno (int n)
  3810. +{
  3811. + if (n >= 0 && n <= 3)
  3812. + return 8 + n;
  3813. + else
  3814. + return INVALID_REGNUM;
  3815. +}
  3816. +
  3817. +
  3818. +/* Compute the distance from register FROM to register TO.
  3819. + These can be the arg pointer, the frame pointer or
  3820. + the stack pointer.
  3821. + Typical stack layout looks like this:
  3822. +
  3823. + old stack pointer -> | |
  3824. + ----
  3825. + | | \
  3826. + | | saved arguments for
  3827. + | | vararg functions
  3828. + arg_pointer -> | | /
  3829. + --
  3830. + | | \
  3831. + | | call saved
  3832. + | | registers
  3833. + | | /
  3834. + frame ptr -> --
  3835. + | | \
  3836. + | | local
  3837. + | | variables
  3838. + stack ptr --> | | /
  3839. + --
  3840. + | | \
  3841. + | | outgoing
  3842. + | | arguments
  3843. + | | /
  3844. + --
  3845. +
  3846. + For a given funciton some or all of these stack compomnents
  3847. + may not be needed, giving rise to the possibility of
  3848. + eliminating some of the registers.
  3849. +
  3850. + The values returned by this function must reflect the behaviour
  3851. + of avr32_expand_prologue() and avr32_compute_save_reg_mask().
  3852. +
  3853. + The sign of the number returned reflects the direction of stack
  3854. + growth, so the values are positive for all eliminations except
  3855. + from the soft frame pointer to the hard frame pointer. */
  3856. +int
  3857. +avr32_initial_elimination_offset (int from, int to)
  3858. +{
  3859. + int i;
  3860. + int call_saved_regs = 0;
  3861. + unsigned long saved_reg_mask;
  3862. + unsigned int local_vars = get_frame_size ();
  3863. +
  3864. + saved_reg_mask = avr32_compute_save_reg_mask (TRUE);
  3865. +
  3866. + for (i = 0; i < 16; ++i)
  3867. + {
  3868. + if (saved_reg_mask & (1 << i))
  3869. + call_saved_regs += 4;
  3870. + }
  3871. +
  3872. + switch (from)
  3873. + {
  3874. + case ARG_POINTER_REGNUM:
  3875. + switch (to)
  3876. + {
  3877. + case STACK_POINTER_REGNUM:
  3878. + return call_saved_regs + local_vars;
  3879. + case FRAME_POINTER_REGNUM:
  3880. + return call_saved_regs;
  3881. + default:
  3882. + abort ();
  3883. + }
  3884. + case FRAME_POINTER_REGNUM:
  3885. + switch (to)
  3886. + {
  3887. + case STACK_POINTER_REGNUM:
  3888. + return local_vars;
  3889. + default:
  3890. + abort ();
  3891. + }
  3892. + default:
  3893. + abort ();
  3894. + }
  3895. +}
  3896. +
  3897. +
  3898. +/*
  3899. + Returns a rtx used when passing the next argument to a function.
  3900. + avr32_init_cumulative_args() and avr32_function_arg_advance() sets which
  3901. + register to use.
  3902. +*/
  3903. +rtx
  3904. +avr32_function_arg (CUMULATIVE_ARGS * cum, enum machine_mode mode,
  3905. + tree type, int named)
  3906. +{
  3907. + int index = -1;
  3908. + //unsigned long func_type = avr32_current_func_type ();
  3909. + //int last_reg_index = (IS_FLASHVAULT(func_type) || IS_FLASHVAULT_IMPL(func_type) || cum->flashvault_func ? LAST_CUM_REG_INDEX - 1 : LAST_CUM_REG_INDEX);
  3910. + int last_reg_index = (cum->flashvault_func ? LAST_CUM_REG_INDEX - 1 : LAST_CUM_REG_INDEX);
  3911. +
  3912. + HOST_WIDE_INT arg_size, arg_rsize;
  3913. + if (type)
  3914. + {
  3915. + arg_size = int_size_in_bytes (type);
  3916. + }
  3917. + else
  3918. + {
  3919. + arg_size = GET_MODE_SIZE (mode);
  3920. + }
  3921. + arg_rsize = PUSH_ROUNDING (arg_size);
  3922. +
  3923. + /*
  3924. + The last time this macro is called, it is called with mode == VOIDmode,
  3925. + and its result is passed to the call or call_value pattern as operands 2
  3926. + and 3 respectively. */
  3927. + if (mode == VOIDmode)
  3928. + {
  3929. + return gen_rtx_CONST_INT (SImode, 22); /* ToDo: fixme. */
  3930. + }
  3931. +
  3932. + if ((*targetm.calls.must_pass_in_stack) (mode, type) || !named)
  3933. + {
  3934. + return NULL_RTX;
  3935. + }
  3936. +
  3937. + if (arg_rsize == 8)
  3938. + {
  3939. + /* use r11:r10 or r9:r8. */
  3940. + if (!(GET_USED_INDEX (cum, 1) || GET_USED_INDEX (cum, 2)))
  3941. + index = 1;
  3942. + else if ((last_reg_index == 4) &&
  3943. + !(GET_USED_INDEX (cum, 3) || GET_USED_INDEX (cum, 4)))
  3944. + index = 3;
  3945. + else
  3946. + index = -1;
  3947. + }
  3948. + else if (arg_rsize == 4)
  3949. + { /* Use first available register */
  3950. + index = 0;
  3951. + while (index <= last_reg_index && GET_USED_INDEX (cum, index))
  3952. + index++;
  3953. + if (index > last_reg_index)
  3954. + index = -1;
  3955. + }
  3956. +
  3957. + SET_REG_INDEX (cum, index);
  3958. +
  3959. + if (GET_REG_INDEX (cum) >= 0)
  3960. + return gen_rtx_REG (mode, avr32_function_arg_reglist[GET_REG_INDEX (cum)]);
  3961. +
  3962. + return NULL_RTX;
  3963. +}
  3964. +
  3965. +
  3966. +/* Set the register used for passing the first argument to a function. */
  3967. +void
  3968. +avr32_init_cumulative_args (CUMULATIVE_ARGS * cum,
  3969. + tree fntype ATTRIBUTE_UNUSED,
  3970. + rtx libname ATTRIBUTE_UNUSED,
  3971. + tree fndecl)
  3972. +{
  3973. + /* Set all registers as unused. */
  3974. + SET_INDEXES_UNUSED (cum);
  3975. +
  3976. + /* Reset uses_anonymous_args */
  3977. + cum->uses_anonymous_args = 0;
  3978. +
  3979. + /* Reset size of stack pushed arguments */
  3980. + cum->stack_pushed_args_size = 0;
  3981. +
  3982. + cum->flashvault_func = (fndecl && (has_attribute_p (fndecl,"flashvault") || has_attribute_p (fndecl,"flashvault_impl")));
  3983. +}
  3984. +
  3985. +
  3986. +/*
  3987. + Set register used for passing the next argument to a function. Only the
  3988. + Scratch Registers are used.
  3989. +
  3990. + number name
  3991. + 15 r15 PC
  3992. + 14 r14 LR
  3993. + 13 r13 _SP_________
  3994. + FIRST_CUM_REG 12 r12 _||_
  3995. + 10 r11 ||
  3996. + 11 r10 _||_ Scratch Registers
  3997. + 8 r9 ||
  3998. + LAST_SCRATCH_REG 9 r8 _\/_________
  3999. + 6 r7 /\
  4000. + 7 r6 ||
  4001. + 4 r5 ||
  4002. + 5 r4 ||
  4003. + 2 r3 ||
  4004. + 3 r2 ||
  4005. + 0 r1 ||
  4006. + 1 r0 _||_________
  4007. +
  4008. +*/
  4009. +void
  4010. +avr32_function_arg_advance (CUMULATIVE_ARGS * cum, enum machine_mode mode,
  4011. + tree type, int named ATTRIBUTE_UNUSED)
  4012. +{
  4013. + HOST_WIDE_INT arg_size, arg_rsize;
  4014. +
  4015. + if (type)
  4016. + {
  4017. + arg_size = int_size_in_bytes (type);
  4018. + }
  4019. + else
  4020. + {
  4021. + arg_size = GET_MODE_SIZE (mode);
  4022. + }
  4023. + arg_rsize = PUSH_ROUNDING (arg_size);
  4024. +
  4025. + /* If the argument had to be passed in stack, no register is used. */
  4026. + if ((*targetm.calls.must_pass_in_stack) (mode, type))
  4027. + {
  4028. + cum->stack_pushed_args_size += PUSH_ROUNDING (int_size_in_bytes (type));
  4029. + return;
  4030. + }
  4031. +
  4032. + /* Mark the used registers as "used". */
  4033. + if (GET_REG_INDEX (cum) >= 0)
  4034. + {
  4035. + SET_USED_INDEX (cum, GET_REG_INDEX (cum));
  4036. + if (arg_rsize == 8)
  4037. + {
  4038. + SET_USED_INDEX (cum, (GET_REG_INDEX (cum) + 1));
  4039. + }
  4040. + }
  4041. + else
  4042. + {
  4043. + /* Had to use stack */
  4044. + cum->stack_pushed_args_size += arg_rsize;
  4045. + }
  4046. +}
  4047. +
  4048. +
  4049. +/*
  4050. + Defines witch direction to go to find the next register to use if the
  4051. + argument is larger then one register or for arguments shorter than an
  4052. + int which is not promoted, such as the last part of structures with
  4053. + size not a multiple of 4. */
  4054. +enum direction
  4055. +avr32_function_arg_padding (enum machine_mode mode ATTRIBUTE_UNUSED,
  4056. + tree type)
  4057. +{
  4058. + /* Pad upward for all aggregates except byte and halfword sized aggregates
  4059. + which can be passed in registers. */
  4060. + if (type
  4061. + && AGGREGATE_TYPE_P (type)
  4062. + && (int_size_in_bytes (type) != 1)
  4063. + && !((int_size_in_bytes (type) == 2)
  4064. + && TYPE_ALIGN_UNIT (type) >= 2)
  4065. + && (int_size_in_bytes (type) & 0x3))
  4066. + {
  4067. + return upward;
  4068. + }
  4069. +
  4070. + return downward;
  4071. +}
  4072. +
  4073. +
  4074. +/* Return a rtx used for the return value from a function call. */
  4075. +rtx
  4076. +avr32_function_value (tree type, tree func, bool outgoing ATTRIBUTE_UNUSED)
  4077. +{
  4078. + if (avr32_return_in_memory (type, func))
  4079. + return NULL_RTX;
  4080. +
  4081. + if (int_size_in_bytes (type) <= 4)
  4082. + {
  4083. + enum machine_mode mode = TYPE_MODE (type);
  4084. + int unsignedp = 0;
  4085. + PROMOTE_FUNCTION_MODE (mode, unsignedp, type);
  4086. + return gen_rtx_REG (mode, RET_REGISTER);
  4087. + }
  4088. + else if (int_size_in_bytes (type) <= 8)
  4089. + return gen_rtx_REG (TYPE_MODE (type), INTERNAL_REGNUM (11));
  4090. +
  4091. + return NULL_RTX;
  4092. +}
  4093. +
  4094. +
  4095. +/* Return a rtx used for the return value from a library function call. */
  4096. +rtx
  4097. +avr32_libcall_value (enum machine_mode mode)
  4098. +{
  4099. +
  4100. + if (GET_MODE_SIZE (mode) <= 4)
  4101. + return gen_rtx_REG (mode, RET_REGISTER);
  4102. + else if (GET_MODE_SIZE (mode) <= 8)
  4103. + return gen_rtx_REG (mode, INTERNAL_REGNUM (11));
  4104. + else
  4105. + return NULL_RTX;
  4106. +}
  4107. +
  4108. +
  4109. +/* Return TRUE if X references a SYMBOL_REF. */
  4110. +int
  4111. +symbol_mentioned_p (rtx x)
  4112. +{
  4113. + const char *fmt;
  4114. + int i;
  4115. +
  4116. + if (GET_CODE (x) == SYMBOL_REF)
  4117. + return 1;
  4118. +
  4119. + fmt = GET_RTX_FORMAT (GET_CODE (x));
  4120. +
  4121. + for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
  4122. + {
  4123. + if (fmt[i] == 'E')
  4124. + {
  4125. + int j;
  4126. +
  4127. + for (j = XVECLEN (x, i) - 1; j >= 0; j--)
  4128. + if (symbol_mentioned_p (XVECEXP (x, i, j)))
  4129. + return 1;
  4130. + }
  4131. + else if (fmt[i] == 'e' && symbol_mentioned_p (XEXP (x, i)))
  4132. + return 1;
  4133. + }
  4134. +
  4135. + return 0;
  4136. +}
  4137. +
  4138. +
  4139. +/* Return TRUE if X references a LABEL_REF. */
  4140. +int
  4141. +label_mentioned_p (rtx x)
  4142. +{
  4143. + const char *fmt;
  4144. + int i;
  4145. +
  4146. + if (GET_CODE (x) == LABEL_REF)
  4147. + return 1;
  4148. +
  4149. + fmt = GET_RTX_FORMAT (GET_CODE (x));
  4150. + for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
  4151. + {
  4152. + if (fmt[i] == 'E')
  4153. + {
  4154. + int j;
  4155. +
  4156. + for (j = XVECLEN (x, i) - 1; j >= 0; j--)
  4157. + if (label_mentioned_p (XVECEXP (x, i, j)))
  4158. + return 1;
  4159. + }
  4160. + else if (fmt[i] == 'e' && label_mentioned_p (XEXP (x, i)))
  4161. + return 1;
  4162. + }
  4163. +
  4164. + return 0;
  4165. +}
  4166. +
  4167. +
  4168. +/* Return TRUE if X contains a MEM expression. */
  4169. +int
  4170. +mem_mentioned_p (rtx x)
  4171. +{
  4172. + const char *fmt;
  4173. + int i;
  4174. +
  4175. + if (MEM_P (x))
  4176. + return 1;
  4177. +
  4178. + fmt = GET_RTX_FORMAT (GET_CODE (x));
  4179. + for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
  4180. + {
  4181. + if (fmt[i] == 'E')
  4182. + {
  4183. + int j;
  4184. +
  4185. + for (j = XVECLEN (x, i) - 1; j >= 0; j--)
  4186. + if (mem_mentioned_p (XVECEXP (x, i, j)))
  4187. + return 1;
  4188. + }
  4189. + else if (fmt[i] == 'e' && mem_mentioned_p (XEXP (x, i)))
  4190. + return 1;
  4191. + }
  4192. +
  4193. + return 0;
  4194. +}
  4195. +
  4196. +
  4197. +int
  4198. +avr32_legitimate_pic_operand_p (rtx x)
  4199. +{
  4200. +
  4201. + /* We can't have const, this must be broken down to a symbol. */
  4202. + if (GET_CODE (x) == CONST)
  4203. + return FALSE;
  4204. +
  4205. + /* Can't access symbols or labels via the constant pool either */
  4206. + if ((GET_CODE (x) == SYMBOL_REF
  4207. + && CONSTANT_POOL_ADDRESS_P (x)
  4208. + && (symbol_mentioned_p (get_pool_constant (x))
  4209. + || label_mentioned_p (get_pool_constant (x)))))
  4210. + return FALSE;
  4211. +
  4212. + return TRUE;
  4213. +}
  4214. +
  4215. +
  4216. +rtx
  4217. +legitimize_pic_address (rtx orig, enum machine_mode mode ATTRIBUTE_UNUSED,
  4218. + rtx reg)
  4219. +{
  4220. +
  4221. + if (GET_CODE (orig) == SYMBOL_REF || GET_CODE (orig) == LABEL_REF)
  4222. + {
  4223. + int subregs = 0;
  4224. +
  4225. + if (reg == 0)
  4226. + {
  4227. + if (!can_create_pseudo_p ())
  4228. + abort ();
  4229. + else
  4230. + reg = gen_reg_rtx (Pmode);
  4231. +
  4232. + subregs = 1;
  4233. + }
  4234. +
  4235. + emit_move_insn (reg, orig);
  4236. +
  4237. + /* Only set current function as using pic offset table if flag_pic is
  4238. + set. This is because this function is also used if
  4239. + TARGET_HAS_ASM_ADDR_PSEUDOS is set. */
  4240. + if (flag_pic)
  4241. + crtl->uses_pic_offset_table = 1;
  4242. +
  4243. + /* Put a REG_EQUAL note on this insn, so that it can be optimized by
  4244. + loop. */
  4245. + return reg;
  4246. + }
  4247. + else if (GET_CODE (orig) == CONST)
  4248. + {
  4249. + rtx base, offset;
  4250. +
  4251. + if (flag_pic
  4252. + && GET_CODE (XEXP (orig, 0)) == PLUS
  4253. + && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
  4254. + return orig;
  4255. +
  4256. + if (reg == 0)
  4257. + {
  4258. + if (!can_create_pseudo_p ())
  4259. + abort ();
  4260. + else
  4261. + reg = gen_reg_rtx (Pmode);
  4262. + }
  4263. +
  4264. + if (GET_CODE (XEXP (orig, 0)) == PLUS)
  4265. + {
  4266. + base =
  4267. + legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
  4268. + offset =
  4269. + legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
  4270. + base == reg ? 0 : reg);
  4271. + }
  4272. + else
  4273. + abort ();
  4274. +
  4275. + if (GET_CODE (offset) == CONST_INT)
  4276. + {
  4277. + /* The base register doesn't really matter, we only want to test
  4278. + the index for the appropriate mode. */
  4279. + if (!avr32_const_ok_for_constraint_p (INTVAL (offset), 'I', "Is21"))
  4280. + {
  4281. + if (can_create_pseudo_p ())
  4282. + offset = force_reg (Pmode, offset);
  4283. + else
  4284. + abort ();
  4285. + }
  4286. +
  4287. + if (GET_CODE (offset) == CONST_INT)
  4288. + return plus_constant (base, INTVAL (offset));
  4289. + }
  4290. +
  4291. + return gen_rtx_PLUS (Pmode, base, offset);
  4292. + }
  4293. +
  4294. + return orig;
  4295. +}
  4296. +
  4297. +
  4298. +/* Generate code to load the PIC register. */
  4299. +void
  4300. +avr32_load_pic_register (void)
  4301. +{
  4302. + rtx l1, pic_tmp;
  4303. + rtx global_offset_table;
  4304. +
  4305. + if ((crtl->uses_pic_offset_table == 0) || TARGET_NO_INIT_GOT)
  4306. + return;
  4307. +
  4308. + if (!flag_pic)
  4309. + abort ();
  4310. +
  4311. + l1 = gen_label_rtx ();
  4312. +
  4313. + global_offset_table = gen_rtx_SYMBOL_REF (Pmode, "_GLOBAL_OFFSET_TABLE_");
  4314. + pic_tmp =
  4315. + gen_rtx_CONST (Pmode,
  4316. + gen_rtx_MINUS (SImode, gen_rtx_LABEL_REF (Pmode, l1),
  4317. + global_offset_table));
  4318. + emit_insn (gen_pic_load_addr
  4319. + (pic_offset_table_rtx, force_const_mem (SImode, pic_tmp)));
  4320. + emit_insn (gen_pic_compute_got_from_pc (pic_offset_table_rtx, l1));
  4321. +
  4322. + /* Need to emit this whether or not we obey regdecls, since setjmp/longjmp
  4323. + can cause life info to screw up. */
  4324. + emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
  4325. +}
  4326. +
  4327. +
  4328. +/* This hook should return true if values of type type are returned at the most
  4329. + significant end of a register (in other words, if they are padded at the
  4330. + least significant end). You can assume that type is returned in a register;
  4331. + the caller is required to check this. Note that the register provided by
  4332. + FUNCTION_VALUE must be able to hold the complete return value. For example,
  4333. + if a 1-, 2- or 3-byte structure is returned at the most significant end of a
  4334. + 4-byte register, FUNCTION_VALUE should provide an SImode rtx. */
  4335. +bool
  4336. +avr32_return_in_msb (tree type ATTRIBUTE_UNUSED)
  4337. +{
  4338. + /* if ( AGGREGATE_TYPE_P (type) ) if ((int_size_in_bytes(type) == 1) ||
  4339. + ((int_size_in_bytes(type) == 2) && TYPE_ALIGN_UNIT(type) >= 2)) return
  4340. + false; else return true; */
  4341. +
  4342. + return false;
  4343. +}
  4344. +
  4345. +
  4346. +/*
  4347. + Returns one if a certain function value is going to be returned in memory
  4348. + and zero if it is going to be returned in a register.
  4349. +
  4350. + BLKmode and all other modes that is larger than 64 bits are returned in
  4351. + memory.
  4352. +*/
  4353. +bool
  4354. +avr32_return_in_memory (tree type, tree fntype ATTRIBUTE_UNUSED)
  4355. +{
  4356. + if (TYPE_MODE (type) == VOIDmode)
  4357. + return false;
  4358. +
  4359. + if (int_size_in_bytes (type) > (2 * UNITS_PER_WORD)
  4360. + || int_size_in_bytes (type) == -1)
  4361. + {
  4362. + return true;
  4363. + }
  4364. +
  4365. + /* If we have an aggregate then use the same mechanism as when checking if
  4366. + it should be passed on the stack. */
  4367. + if (type
  4368. + && AGGREGATE_TYPE_P (type)
  4369. + && (*targetm.calls.must_pass_in_stack) (TYPE_MODE (type), type))
  4370. + return true;
  4371. +
  4372. + return false;
  4373. +}
  4374. +
  4375. +
  4376. +/* Output the constant part of the trampoline.
  4377. + lddpc r0, pc[0x8:e] ; load static chain register
  4378. + lddpc pc, pc[0x8:e] ; jump to subrutine
  4379. + .long 0 ; Address to static chain,
  4380. + ; filled in by avr32_initialize_trampoline()
  4381. + .long 0 ; Address to subrutine,
  4382. + ; filled in by avr32_initialize_trampoline()
  4383. +*/
  4384. +void
  4385. +avr32_trampoline_template (FILE * file)
  4386. +{
  4387. + fprintf (file, "\tlddpc r0, pc[8]\n");
  4388. + fprintf (file, "\tlddpc pc, pc[8]\n");
  4389. + /* make room for the address of the static chain. */
  4390. + fprintf (file, "\t.long\t0\n");
  4391. + /* make room for the address to the subrutine. */
  4392. + fprintf (file, "\t.long\t0\n");
  4393. +}
  4394. +
  4395. +
  4396. +/* Initialize the variable parts of a trampoline. */
  4397. +void
  4398. +avr32_initialize_trampoline (rtx addr, rtx fnaddr, rtx static_chain)
  4399. +{
  4400. + /* Store the address to the static chain. */
  4401. + emit_move_insn (gen_rtx_MEM
  4402. + (SImode, plus_constant (addr, TRAMPOLINE_SIZE - 4)),
  4403. + static_chain);
  4404. +
  4405. + /* Store the address to the function. */
  4406. + emit_move_insn (gen_rtx_MEM (SImode, plus_constant (addr, TRAMPOLINE_SIZE)),
  4407. + fnaddr);
  4408. +
  4409. + emit_insn (gen_cache (gen_rtx_REG (SImode, 13),
  4410. + gen_rtx_CONST_INT (SImode,
  4411. + AVR32_CACHE_INVALIDATE_ICACHE)));
  4412. +}
  4413. +
  4414. +
  4415. +/* Return nonzero if X is valid as an addressing register. */
  4416. +int
  4417. +avr32_address_register_rtx_p (rtx x, int strict_p)
  4418. +{
  4419. + int regno;
  4420. +
  4421. + if (!register_operand(x, GET_MODE(x)))
  4422. + return 0;
  4423. +
  4424. + /* If strict we require the register to be a hard register. */
  4425. + if (strict_p
  4426. + && !REG_P(x))
  4427. + return 0;
  4428. +
  4429. + regno = REGNO (x);
  4430. +
  4431. + if (strict_p)
  4432. + return REGNO_OK_FOR_BASE_P (regno);
  4433. +
  4434. + return (regno <= LAST_REGNUM || regno >= FIRST_PSEUDO_REGISTER);
  4435. +}
  4436. +
  4437. +
  4438. +/* Return nonzero if INDEX is valid for an address index operand. */
  4439. +int
  4440. +avr32_legitimate_index_p (enum machine_mode mode, rtx index, int strict_p)
  4441. +{
  4442. + enum rtx_code code = GET_CODE (index);
  4443. +
  4444. + if (GET_MODE_SIZE (mode) > 8)
  4445. + return 0;
  4446. +
  4447. + /* Standard coprocessor addressing modes. */
  4448. + if (code == CONST_INT)
  4449. + {
  4450. + return CONST_OK_FOR_CONSTRAINT_P (INTVAL (index), 'K', "Ks16");
  4451. + }
  4452. +
  4453. + if (avr32_address_register_rtx_p (index, strict_p))
  4454. + return 1;
  4455. +
  4456. + if (code == MULT)
  4457. + {
  4458. + rtx xiop0 = XEXP (index, 0);
  4459. + rtx xiop1 = XEXP (index, 1);
  4460. + return ((avr32_address_register_rtx_p (xiop0, strict_p)
  4461. + && power_of_two_operand (xiop1, SImode)
  4462. + && (INTVAL (xiop1) <= 8))
  4463. + || (avr32_address_register_rtx_p (xiop1, strict_p)
  4464. + && power_of_two_operand (xiop0, SImode)
  4465. + && (INTVAL (xiop0) <= 8)));
  4466. + }
  4467. + else if (code == ASHIFT)
  4468. + {
  4469. + rtx op = XEXP (index, 1);
  4470. +
  4471. + return (avr32_address_register_rtx_p (XEXP (index, 0), strict_p)
  4472. + && GET_CODE (op) == CONST_INT
  4473. + && INTVAL (op) > 0 && INTVAL (op) <= 3);
  4474. + }
  4475. +
  4476. + return 0;
  4477. +}
  4478. +
  4479. +
  4480. +/*
  4481. + Used in the GO_IF_LEGITIMATE_ADDRESS macro. Returns a nonzero value if
  4482. + the RTX x is a legitimate memory address.
  4483. +
  4484. + Returns NO_REGS if the address is not legatime, GENERAL_REGS or ALL_REGS
  4485. + if it is.
  4486. +*/
  4487. +
  4488. +
  4489. +/* Forward declaration */
  4490. +int is_minipool_label (rtx label);
  4491. +
  4492. +int
  4493. +avr32_legitimate_address (enum machine_mode mode, rtx x, int strict)
  4494. +{
  4495. +
  4496. + switch (GET_CODE (x))
  4497. + {
  4498. + case REG:
  4499. + return avr32_address_register_rtx_p (x, strict);
  4500. + case CONST_INT:
  4501. + return ((mode==SImode) && TARGET_RMW_ADDRESSABLE_DATA
  4502. + && CONST_OK_FOR_CONSTRAINT_P(INTVAL(x), 'K', "Ks17"));
  4503. + case CONST:
  4504. + {
  4505. + rtx label = avr32_find_symbol (x);
  4506. + if (label
  4507. + &&
  4508. + (/*
  4509. + If we enable (const (plus (symbol_ref ...))) type constant
  4510. + pool entries we must add support for it in the predicates and
  4511. + in the minipool generation in avr32_reorg().
  4512. + (CONSTANT_POOL_ADDRESS_P (label)
  4513. + && !(flag_pic
  4514. + && (symbol_mentioned_p (get_pool_constant (label))
  4515. + || label_mentioned_p (get_pool_constant (label)))))
  4516. + ||*/
  4517. + ((GET_CODE (label) == LABEL_REF)
  4518. + && GET_CODE (XEXP (label, 0)) == CODE_LABEL
  4519. + && is_minipool_label (XEXP (label, 0)))
  4520. + /*|| ((GET_CODE (label) == SYMBOL_REF)
  4521. + && mode == SImode
  4522. + && SYMBOL_REF_RMW_ADDR(label))*/))
  4523. + {
  4524. + return TRUE;
  4525. + }
  4526. + }
  4527. + break;
  4528. + case LABEL_REF:
  4529. + if (GET_CODE (XEXP (x, 0)) == CODE_LABEL
  4530. + && is_minipool_label (XEXP (x, 0)))
  4531. + {
  4532. + return TRUE;
  4533. + }
  4534. + break;
  4535. + case SYMBOL_REF:
  4536. + {
  4537. + if (CONSTANT_POOL_ADDRESS_P (x)
  4538. + && !(flag_pic
  4539. + && (symbol_mentioned_p (get_pool_constant (x))
  4540. + || label_mentioned_p (get_pool_constant (x)))))
  4541. + return TRUE;
  4542. + else if (SYMBOL_REF_RCALL_FUNCTION_P (x)
  4543. + || (mode == SImode
  4544. + && SYMBOL_REF_RMW_ADDR (x)))
  4545. + return TRUE;
  4546. + break;
  4547. + }
  4548. + case PRE_DEC: /* (pre_dec (...)) */
  4549. + case POST_INC: /* (post_inc (...)) */
  4550. + return avr32_address_register_rtx_p (XEXP (x, 0), strict);
  4551. + case PLUS: /* (plus (...) (...)) */
  4552. + {
  4553. + rtx xop0 = XEXP (x, 0);
  4554. + rtx xop1 = XEXP (x, 1);
  4555. +
  4556. + return ((avr32_address_register_rtx_p (xop0, strict)
  4557. + && avr32_legitimate_index_p (mode, xop1, strict))
  4558. + || (avr32_address_register_rtx_p (xop1, strict)
  4559. + && avr32_legitimate_index_p (mode, xop0, strict)));
  4560. + }
  4561. + default:
  4562. + break;
  4563. + }
  4564. +
  4565. + return FALSE;
  4566. +}
  4567. +
  4568. +
  4569. +int
  4570. +avr32_const_ok_for_move (HOST_WIDE_INT c)
  4571. +{
  4572. + if ( TARGET_V2_INSNS )
  4573. + return ( avr32_const_ok_for_constraint_p (c, 'K', "Ks21")
  4574. + /* movh instruction */
  4575. + || avr32_hi16_immediate_operand (GEN_INT(c), VOIDmode) );
  4576. + else
  4577. + return avr32_const_ok_for_constraint_p (c, 'K', "Ks21");
  4578. +}
  4579. +
  4580. +
  4581. +int
  4582. +avr32_const_double_immediate (rtx value)
  4583. +{
  4584. + HOST_WIDE_INT hi, lo;
  4585. +
  4586. + if (GET_CODE (value) != CONST_DOUBLE)
  4587. + return FALSE;
  4588. +
  4589. + if (SCALAR_FLOAT_MODE_P (GET_MODE (value)))
  4590. + {
  4591. + HOST_WIDE_INT target_float[2];
  4592. + hi = lo = 0;
  4593. + real_to_target (target_float, CONST_DOUBLE_REAL_VALUE (value),
  4594. + GET_MODE (value));
  4595. + lo = target_float[0];
  4596. + hi = target_float[1];
  4597. + }
  4598. + else
  4599. + {
  4600. + hi = CONST_DOUBLE_HIGH (value);
  4601. + lo = CONST_DOUBLE_LOW (value);
  4602. + }
  4603. +
  4604. + if (avr32_const_ok_for_constraint_p (lo, 'K', "Ks21")
  4605. + && (GET_MODE (value) == SFmode
  4606. + || avr32_const_ok_for_constraint_p (hi, 'K', "Ks21")))
  4607. + {
  4608. + return TRUE;
  4609. + }
  4610. +
  4611. + return FALSE;
  4612. +}
  4613. +
  4614. +
  4615. +int
  4616. +avr32_legitimate_constant_p (rtx x)
  4617. +{
  4618. + switch (GET_CODE (x))
  4619. + {
  4620. + case CONST_INT:
  4621. + /* Check if we should put large immediate into constant pool
  4622. + or load them directly with mov/orh.*/
  4623. + if (!avr32_imm_in_const_pool)
  4624. + return 1;
  4625. +
  4626. + return avr32_const_ok_for_move (INTVAL (x));
  4627. + case CONST_DOUBLE:
  4628. + /* Check if we should put large immediate into constant pool
  4629. + or load them directly with mov/orh.*/
  4630. + if (!avr32_imm_in_const_pool)
  4631. + return 1;
  4632. +
  4633. + if (GET_MODE (x) == SFmode
  4634. + || GET_MODE (x) == DFmode || GET_MODE (x) == DImode)
  4635. + return avr32_const_double_immediate (x);
  4636. + else
  4637. + return 0;
  4638. + case LABEL_REF:
  4639. + case SYMBOL_REF:
  4640. + return avr32_find_symbol (x) && (flag_pic || TARGET_HAS_ASM_ADDR_PSEUDOS);
  4641. + case CONST:
  4642. + case HIGH:
  4643. + case CONST_VECTOR:
  4644. + return 0;
  4645. + default:
  4646. + printf ("%s():\n", __FUNCTION__);
  4647. + debug_rtx (x);
  4648. + return 1;
  4649. + }
  4650. +}
  4651. +
  4652. +
  4653. +/* Strip any special encoding from labels */
  4654. +const char *
  4655. +avr32_strip_name_encoding (const char *name)
  4656. +{
  4657. + const char *stripped = name;
  4658. +
  4659. + while (1)
  4660. + {
  4661. + switch (stripped[0])
  4662. + {
  4663. + case '#':
  4664. + stripped = strchr (name + 1, '#') + 1;
  4665. + break;
  4666. + case '*':
  4667. + stripped = &stripped[1];
  4668. + break;
  4669. + default:
  4670. + return stripped;
  4671. + }
  4672. + }
  4673. +}
  4674. +
  4675. +
  4676. +
  4677. +/* Do anything needed before RTL is emitted for each function. */
  4678. +static struct machine_function *
  4679. +avr32_init_machine_status (void)
  4680. +{
  4681. + struct machine_function *machine;
  4682. + machine =
  4683. + (machine_function *) ggc_alloc_cleared (sizeof (machine_function));
  4684. +
  4685. +#if AVR32_FT_UNKNOWN != 0
  4686. + machine->func_type = AVR32_FT_UNKNOWN;
  4687. +#endif
  4688. +
  4689. + machine->minipool_label_head = 0;
  4690. + machine->minipool_label_tail = 0;
  4691. + machine->ifcvt_after_reload = 0;
  4692. + return machine;
  4693. +}
  4694. +
  4695. +
  4696. +void
  4697. +avr32_init_expanders (void)
  4698. +{
  4699. + /* Arrange to initialize and mark the machine per-function status. */
  4700. + init_machine_status = avr32_init_machine_status;
  4701. +}
  4702. +
  4703. +
  4704. +/* Return an RTX indicating where the return address to the
  4705. + calling function can be found. */
  4706. +rtx
  4707. +avr32_return_addr (int count, rtx frame ATTRIBUTE_UNUSED)
  4708. +{
  4709. + if (count != 0)
  4710. + return NULL_RTX;
  4711. +
  4712. + return get_hard_reg_initial_val (Pmode, LR_REGNUM);
  4713. +}
  4714. +
  4715. +
  4716. +void
  4717. +avr32_encode_section_info (tree decl, rtx rtl, int first)
  4718. +{
  4719. + default_encode_section_info(decl, rtl, first);
  4720. +
  4721. + if ( TREE_CODE (decl) == VAR_DECL
  4722. + && (GET_CODE (XEXP (rtl, 0)) == SYMBOL_REF)
  4723. + && (lookup_attribute ("rmw_addressable", DECL_ATTRIBUTES (decl))
  4724. + || TARGET_RMW_ADDRESSABLE_DATA) ){
  4725. + if ( !TARGET_RMW || flag_pic )
  4726. + return;
  4727. + // {
  4728. + // warning ("Using RMW addressable data with an arch that does not support RMW instructions.");
  4729. + // return;
  4730. + // }
  4731. + //
  4732. + //if ( flag_pic )
  4733. + // {
  4734. + // warning ("Using RMW addressable data with together with -fpic switch. Can not use RMW instruction when compiling with -fpic.");
  4735. + // return;
  4736. + // }
  4737. + SYMBOL_REF_FLAGS (XEXP (rtl, 0)) |= (1 << SYMBOL_FLAG_RMW_ADDR_SHIFT);
  4738. + }
  4739. +}
  4740. +
  4741. +
  4742. +void
  4743. +avr32_asm_output_label (FILE * stream, const char *name)
  4744. +{
  4745. + name = avr32_strip_name_encoding (name);
  4746. +
  4747. + /* Print the label. */
  4748. + assemble_name (stream, name);
  4749. + fprintf (stream, ":\n");
  4750. +}
  4751. +
  4752. +
  4753. +void
  4754. +avr32_asm_weaken_label (FILE * stream, const char *name)
  4755. +{
  4756. + fprintf (stream, "\t.weak ");
  4757. + assemble_name (stream, name);
  4758. + fprintf (stream, "\n");
  4759. +}
  4760. +
  4761. +
  4762. +/*
  4763. + Checks if a labelref is equal to a reserved word in the assembler. If it is,
  4764. + insert a '_' before the label name.
  4765. +*/
  4766. +void
  4767. +avr32_asm_output_labelref (FILE * stream, const char *name)
  4768. +{
  4769. + int verbatim = FALSE;
  4770. + const char *stripped = name;
  4771. + int strip_finished = FALSE;
  4772. +
  4773. + while (!strip_finished)
  4774. + {
  4775. + switch (stripped[0])
  4776. + {
  4777. + case '#':
  4778. + stripped = strchr (name + 1, '#') + 1;
  4779. + break;
  4780. + case '*':
  4781. + stripped = &stripped[1];
  4782. + verbatim = TRUE;
  4783. + break;
  4784. + default:
  4785. + strip_finished = TRUE;
  4786. + break;
  4787. + }
  4788. + }
  4789. +
  4790. + if (verbatim)
  4791. + fputs (stripped, stream);
  4792. + else
  4793. + asm_fprintf (stream, "%U%s", stripped);
  4794. +}
  4795. +
  4796. +
  4797. +/*
  4798. + Check if the comparison in compare_exp is redundant
  4799. + for the condition given in next_cond given that the
  4800. + needed flags are already set by an earlier instruction.
  4801. + Uses cc_prev_status to check this.
  4802. +
  4803. + Returns NULL_RTX if the compare is not redundant
  4804. + or the new condition to use in the conditional
  4805. + instruction if the compare is redundant.
  4806. +*/
  4807. +static rtx
  4808. +is_compare_redundant (rtx compare_exp, rtx next_cond)
  4809. +{
  4810. + int z_flag_valid = FALSE;
  4811. + int n_flag_valid = FALSE;
  4812. + rtx new_cond;
  4813. +
  4814. + if (GET_CODE (compare_exp) != COMPARE
  4815. + && GET_CODE (compare_exp) != AND)
  4816. + return NULL_RTX;
  4817. +
  4818. +
  4819. + if (rtx_equal_p (cc_prev_status.mdep.value, compare_exp))
  4820. + {
  4821. + /* cc0 already contains the correct comparison -> delete cmp insn */
  4822. + return next_cond;
  4823. + }
  4824. +
  4825. + if (GET_MODE (compare_exp) != SImode)
  4826. + return NULL_RTX;
  4827. +
  4828. + switch (cc_prev_status.mdep.flags)
  4829. + {
  4830. + case CC_SET_VNCZ:
  4831. + case CC_SET_NCZ:
  4832. + n_flag_valid = TRUE;
  4833. + case CC_SET_CZ:
  4834. + case CC_SET_Z:
  4835. + z_flag_valid = TRUE;
  4836. + }
  4837. +
  4838. + if (cc_prev_status.mdep.value
  4839. + && GET_CODE (compare_exp) == COMPARE
  4840. + && REG_P (XEXP (compare_exp, 0))
  4841. + && REGNO (XEXP (compare_exp, 0)) == REGNO (cc_prev_status.mdep.value)
  4842. + && GET_CODE (XEXP (compare_exp, 1)) == CONST_INT
  4843. + && next_cond != NULL_RTX)
  4844. + {
  4845. + if (INTVAL (XEXP (compare_exp, 1)) == 0
  4846. + && z_flag_valid
  4847. + && (GET_CODE (next_cond) == EQ || GET_CODE (next_cond) == NE))
  4848. + /* We can skip the comparison since the Z flag already reflects ops[0]. */
  4849. + return next_cond;
  4850. + else if (n_flag_valid
  4851. + && ((INTVAL (XEXP (compare_exp, 1)) == 0
  4852. + && (GET_CODE (next_cond) == GE
  4853. + || GET_CODE (next_cond) == LT))
  4854. + || (INTVAL (XEXP (compare_exp, 1)) == -1
  4855. + && (GET_CODE (next_cond) == GT
  4856. + || GET_CODE (next_cond) == LE))))
  4857. + {
  4858. + /* We can skip the comparison since the N flag already reflects ops[0],
  4859. + which means that we can use the mi/pl conditions to check if
  4860. + ops[0] is GE or LT 0. */
  4861. + if ((GET_CODE (next_cond) == GE) || (GET_CODE (next_cond) == GT))
  4862. + new_cond =
  4863. + gen_rtx_UNSPEC (GET_MODE (next_cond), gen_rtvec (2, cc0_rtx, const0_rtx),
  4864. + UNSPEC_COND_PL);
  4865. + else
  4866. + new_cond =
  4867. + gen_rtx_UNSPEC (GET_MODE (next_cond), gen_rtvec (2, cc0_rtx, const0_rtx),
  4868. + UNSPEC_COND_MI);
  4869. + return new_cond;
  4870. + }
  4871. + }
  4872. + return NULL_RTX;
  4873. +}
  4874. +
  4875. +
  4876. +/* Updates cc_status. */
  4877. +void
  4878. +avr32_notice_update_cc (rtx exp, rtx insn)
  4879. +{
  4880. + enum attr_cc attr_cc = get_attr_cc (insn);
  4881. +
  4882. + if ( attr_cc == CC_SET_Z_IF_NOT_V2 )
  4883. + {
  4884. + if (TARGET_V2_INSNS)
  4885. + attr_cc = CC_NONE;
  4886. + else
  4887. + attr_cc = CC_SET_Z;
  4888. + }
  4889. +
  4890. + switch (attr_cc)
  4891. + {
  4892. + case CC_CALL_SET:
  4893. + CC_STATUS_INIT;
  4894. + /* Check if the function call returns a value in r12 */
  4895. + if (REG_P (recog_data.operand[0])
  4896. + && REGNO (recog_data.operand[0]) == RETVAL_REGNUM)
  4897. + {
  4898. + cc_status.flags = 0;
  4899. + cc_status.mdep.value =
  4900. + gen_rtx_COMPARE (SImode, recog_data.operand[0], const0_rtx);
  4901. + cc_status.mdep.flags = CC_SET_VNCZ;
  4902. + cc_status.mdep.cond_exec_cmp_clobbered = 0;
  4903. +
  4904. + }
  4905. + break;
  4906. + case CC_COMPARE:
  4907. + {
  4908. + /* Check that the compare will not be optimized away; if so, nothing
  4909. + should be done. */
  4910. + rtx compare_exp = SET_SRC (exp);
  4911. + /* Check if we have a tst expression. If so convert it to a
  4912. + compare with 0. */
  4913. + if ( REG_P (SET_SRC (exp)) )
  4914. + compare_exp = gen_rtx_COMPARE (GET_MODE (SET_SRC (exp)),
  4915. + SET_SRC (exp),
  4916. + const0_rtx);
  4917. +
  4918. + if (!next_insn_emits_cmp (insn)
  4919. + && (is_compare_redundant (compare_exp, get_next_insn_cond (insn)) == NULL_RTX))
  4920. + {
  4921. +
  4922. + /* Reset the nonstandard flag */
  4923. + CC_STATUS_INIT;
  4924. + cc_status.flags = 0;
  4925. + cc_status.mdep.value = compare_exp;
  4926. + cc_status.mdep.flags = CC_SET_VNCZ;
  4927. + cc_status.mdep.cond_exec_cmp_clobbered = 0;
  4928. + }
  4929. + }
  4930. + break;
  4931. + case CC_CMP_COND_INSN:
  4932. + {
  4933. + /* Conditional insn that emit the compare itself. */
  4934. + rtx cmp;
  4935. + rtx cmp_op0, cmp_op1;
  4936. + rtx cond;
  4937. + rtx dest;
  4938. + rtx next_insn = next_nonnote_insn (insn);
  4939. +
  4940. + if ( GET_CODE (exp) == COND_EXEC )
  4941. + {
  4942. + cmp_op0 = XEXP (COND_EXEC_TEST (exp), 0);
  4943. + cmp_op1 = XEXP (COND_EXEC_TEST (exp), 1);
  4944. + cond = COND_EXEC_TEST (exp);
  4945. + dest = SET_DEST (COND_EXEC_CODE (exp));
  4946. + }
  4947. + else
  4948. + {
  4949. + /* If then else conditional. compare operands are in operands
  4950. + 4 and 5. */
  4951. + cmp_op0 = recog_data.operand[4];
  4952. + cmp_op1 = recog_data.operand[5];
  4953. + cond = recog_data.operand[1];
  4954. + dest = SET_DEST (exp);
  4955. + }
  4956. +
  4957. + if ( GET_CODE (cmp_op0) == AND )
  4958. + cmp = cmp_op0;
  4959. + else
  4960. + cmp = gen_rtx_COMPARE (GET_MODE (cmp_op0),
  4961. + cmp_op0,
  4962. + cmp_op1);
  4963. +
  4964. + /* Check if the conditional insns updates a register present
  4965. + in the comparison, if so then we must reset the cc_status. */
  4966. + if (REG_P (dest)
  4967. + && (reg_mentioned_p (dest, cmp_op0)
  4968. + || reg_mentioned_p (dest, cmp_op1))
  4969. + && GET_CODE (exp) != COND_EXEC )
  4970. + {
  4971. + CC_STATUS_INIT;
  4972. + }
  4973. + else if (is_compare_redundant (cmp, cond) == NULL_RTX)
  4974. + {
  4975. + /* Reset the nonstandard flag */
  4976. + CC_STATUS_INIT;
  4977. + if ( GET_CODE (cmp_op0) == AND )
  4978. + {
  4979. + cc_status.flags = CC_INVERTED;
  4980. + cc_status.mdep.flags = CC_SET_Z;
  4981. + }
  4982. + else
  4983. + {
  4984. + cc_status.flags = 0;
  4985. + cc_status.mdep.flags = CC_SET_VNCZ;
  4986. + }
  4987. + cc_status.mdep.value = cmp;
  4988. + cc_status.mdep.cond_exec_cmp_clobbered = 0;
  4989. + }
  4990. +
  4991. +
  4992. + /* Check if we have a COND_EXEC insn which updates one
  4993. + of the registers in the compare status. */
  4994. + if (REG_P (dest)
  4995. + && (reg_mentioned_p (dest, cmp_op0)
  4996. + || reg_mentioned_p (dest, cmp_op1))
  4997. + && GET_CODE (exp) == COND_EXEC )
  4998. + cc_status.mdep.cond_exec_cmp_clobbered = 1;
  4999. +
  5000. + if ( cc_status.mdep.cond_exec_cmp_clobbered
  5001. + && GET_CODE (exp) == COND_EXEC
  5002. + && next_insn != NULL
  5003. + && INSN_P (next_insn)
  5004. + && !(GET_CODE (PATTERN (next_insn)) == COND_EXEC
  5005. + && rtx_equal_p (XEXP (COND_EXEC_TEST (PATTERN (next_insn)), 0), cmp_op0)
  5006. + && rtx_equal_p (XEXP (COND_EXEC_TEST (PATTERN (next_insn)), 1), cmp_op1)
  5007. + && (GET_CODE (COND_EXEC_TEST (PATTERN (next_insn))) == GET_CODE (cond)
  5008. + || GET_CODE (COND_EXEC_TEST (PATTERN (next_insn))) == reverse_condition (GET_CODE (cond)))) )
  5009. + {
  5010. + /* We have a sequence of conditional insns where the compare status has been clobbered
  5011. + since the compare no longer reflects the content of the values to compare. */
  5012. + CC_STATUS_INIT;
  5013. + cc_status.mdep.cond_exec_cmp_clobbered = 1;
  5014. + }
  5015. +
  5016. + }
  5017. + break;
  5018. + case CC_BLD:
  5019. + /* Bit load is kind of like an inverted testsi, because the Z flag is
  5020. + inverted */
  5021. + CC_STATUS_INIT;
  5022. + cc_status.flags = CC_INVERTED;
  5023. + cc_status.mdep.value = SET_SRC (exp);
  5024. + cc_status.mdep.flags = CC_SET_Z;
  5025. + cc_status.mdep.cond_exec_cmp_clobbered = 0;
  5026. + break;
  5027. + case CC_NONE:
  5028. + /* Insn does not affect CC at all. Check if the instruction updates
  5029. + some of the registers currently reflected in cc0 */
  5030. +
  5031. + if ((GET_CODE (exp) == SET)
  5032. + && (cc_status.value1 || cc_status.value2 || cc_status.mdep.value)
  5033. + && (reg_mentioned_p (SET_DEST (exp), cc_status.value1)
  5034. + || reg_mentioned_p (SET_DEST (exp), cc_status.value2)
  5035. + || reg_mentioned_p (SET_DEST (exp), cc_status.mdep.value)))
  5036. + {
  5037. + CC_STATUS_INIT;
  5038. + }
  5039. +
  5040. + /* If this is a parallel we must step through each of the parallel
  5041. + expressions */
  5042. + if (GET_CODE (exp) == PARALLEL)
  5043. + {
  5044. + int i;
  5045. + for (i = 0; i < XVECLEN (exp, 0); ++i)
  5046. + {
  5047. + rtx vec_exp = XVECEXP (exp, 0, i);
  5048. + if ((GET_CODE (vec_exp) == SET)
  5049. + && (cc_status.value1 || cc_status.value2
  5050. + || cc_status.mdep.value)
  5051. + && (reg_mentioned_p (SET_DEST (vec_exp), cc_status.value1)
  5052. + || reg_mentioned_p (SET_DEST (vec_exp),
  5053. + cc_status.value2)
  5054. + || reg_mentioned_p (SET_DEST (vec_exp),
  5055. + cc_status.mdep.value)))
  5056. + {
  5057. + CC_STATUS_INIT;
  5058. + }
  5059. + }
  5060. + }
  5061. +
  5062. + /* Check if we have memory operations with post_inc or pre_dec on the
  5063. + register currently reflected in cc0 */
  5064. + if (GET_CODE (exp) == SET
  5065. + && GET_CODE (SET_SRC (exp)) == MEM
  5066. + && (GET_CODE (XEXP (SET_SRC (exp), 0)) == POST_INC
  5067. + || GET_CODE (XEXP (SET_SRC (exp), 0)) == PRE_DEC)
  5068. + &&
  5069. + (reg_mentioned_p
  5070. + (XEXP (XEXP (SET_SRC (exp), 0), 0), cc_status.value1)
  5071. + || reg_mentioned_p (XEXP (XEXP (SET_SRC (exp), 0), 0),
  5072. + cc_status.value2)
  5073. + || reg_mentioned_p (XEXP (XEXP (SET_SRC (exp), 0), 0),
  5074. + cc_status.mdep.value)))
  5075. + CC_STATUS_INIT;
  5076. +
  5077. + if (GET_CODE (exp) == SET
  5078. + && GET_CODE (SET_DEST (exp)) == MEM
  5079. + && (GET_CODE (XEXP (SET_DEST (exp), 0)) == POST_INC
  5080. + || GET_CODE (XEXP (SET_DEST (exp), 0)) == PRE_DEC)
  5081. + &&
  5082. + (reg_mentioned_p
  5083. + (XEXP (XEXP (SET_DEST (exp), 0), 0), cc_status.value1)
  5084. + || reg_mentioned_p (XEXP (XEXP (SET_DEST (exp), 0), 0),
  5085. + cc_status.value2)
  5086. + || reg_mentioned_p (XEXP (XEXP (SET_DEST (exp), 0), 0),
  5087. + cc_status.mdep.value)))
  5088. + CC_STATUS_INIT;
  5089. + break;
  5090. +
  5091. + case CC_SET_VNCZ:
  5092. + CC_STATUS_INIT;
  5093. + cc_status.mdep.value = recog_data.operand[0];
  5094. + cc_status.mdep.flags = CC_SET_VNCZ;
  5095. + cc_status.mdep.cond_exec_cmp_clobbered = 0;
  5096. + break;
  5097. +
  5098. + case CC_SET_NCZ:
  5099. + CC_STATUS_INIT;
  5100. + cc_status.mdep.value = recog_data.operand[0];
  5101. + cc_status.mdep.flags = CC_SET_NCZ;
  5102. + cc_status.mdep.cond_exec_cmp_clobbered = 0;
  5103. + break;
  5104. +
  5105. + case CC_SET_CZ:
  5106. + CC_STATUS_INIT;
  5107. + cc_status.mdep.value = recog_data.operand[0];
  5108. + cc_status.mdep.flags = CC_SET_CZ;
  5109. + cc_status.mdep.cond_exec_cmp_clobbered = 0;
  5110. + break;
  5111. +
  5112. + case CC_SET_Z:
  5113. + CC_STATUS_INIT;
  5114. + cc_status.mdep.value = recog_data.operand[0];
  5115. + cc_status.mdep.flags = CC_SET_Z;
  5116. + cc_status.mdep.cond_exec_cmp_clobbered = 0;
  5117. + break;
  5118. +
  5119. + case CC_CLOBBER:
  5120. + CC_STATUS_INIT;
  5121. + break;
  5122. +
  5123. + default:
  5124. + CC_STATUS_INIT;
  5125. + }
  5126. +}
  5127. +
  5128. +
  5129. +/*
  5130. + Outputs to stdio stream stream the assembler syntax for an instruction
  5131. + operand x. x is an RTL expression.
  5132. +*/
  5133. +void
  5134. +avr32_print_operand (FILE * stream, rtx x, int code)
  5135. +{
  5136. + int error = 0;
  5137. +
  5138. + if ( code == '?' )
  5139. + {
  5140. + /* Predicable instruction, print condition code */
  5141. +
  5142. + /* If the insn should not be conditional then do nothing. */
  5143. + if ( current_insn_predicate == NULL_RTX )
  5144. + return;
  5145. +
  5146. + /* Set x to the predicate to force printing
  5147. + the condition later on. */
  5148. + x = current_insn_predicate;
  5149. +
  5150. + /* Reverse condition if using the bld insn. */
  5151. + if ( GET_CODE (XEXP(current_insn_predicate,0)) == AND )
  5152. + x = reversed_condition (current_insn_predicate);
  5153. + }
  5154. + else if ( code == '!' )
  5155. + {
  5156. + /* Output compare for conditional insn if needed. */
  5157. + rtx new_cond;
  5158. + gcc_assert ( current_insn_predicate != NULL_RTX );
  5159. + new_cond = avr32_output_cmp(current_insn_predicate,
  5160. + GET_MODE(XEXP(current_insn_predicate,0)),
  5161. + XEXP(current_insn_predicate,0),
  5162. + XEXP(current_insn_predicate,1));
  5163. +
  5164. + /* Check if the new condition is a special avr32 condition
  5165. + specified using UNSPECs. If so we must handle it differently. */
  5166. + if ( GET_CODE (new_cond) == UNSPEC )
  5167. + {
  5168. + current_insn_predicate =
  5169. + gen_rtx_UNSPEC (CCmode,
  5170. + gen_rtvec (2,
  5171. + XEXP(current_insn_predicate,0),
  5172. + XEXP(current_insn_predicate,1)),
  5173. + XINT (new_cond, 1));
  5174. + }
  5175. + else
  5176. + {
  5177. + PUT_CODE(current_insn_predicate, GET_CODE(new_cond));
  5178. + }
  5179. + return;
  5180. + }
  5181. +
  5182. + switch (GET_CODE (x))
  5183. + {
  5184. + case UNSPEC:
  5185. + switch (XINT (x, 1))
  5186. + {
  5187. + case UNSPEC_COND_PL:
  5188. + if (code == 'i')
  5189. + fputs ("mi", stream);
  5190. + else
  5191. + fputs ("pl", stream);
  5192. + break;
  5193. + case UNSPEC_COND_MI:
  5194. + if (code == 'i')
  5195. + fputs ("pl", stream);
  5196. + else
  5197. + fputs ("mi", stream);
  5198. + break;
  5199. + default:
  5200. + error = 1;
  5201. + }
  5202. + break;
  5203. + case EQ:
  5204. + if (code == 'i')
  5205. + fputs ("ne", stream);
  5206. + else
  5207. + fputs ("eq", stream);
  5208. + break;
  5209. + case NE:
  5210. + if (code == 'i')
  5211. + fputs ("eq", stream);
  5212. + else
  5213. + fputs ("ne", stream);
  5214. + break;
  5215. + case GT:
  5216. + if (code == 'i')
  5217. + fputs ("le", stream);
  5218. + else
  5219. + fputs ("gt", stream);
  5220. + break;
  5221. + case GTU:
  5222. + if (code == 'i')
  5223. + fputs ("ls", stream);
  5224. + else
  5225. + fputs ("hi", stream);
  5226. + break;
  5227. + case LT:
  5228. + if (code == 'i')
  5229. + fputs ("ge", stream);
  5230. + else
  5231. + fputs ("lt", stream);
  5232. + break;
  5233. + case LTU:
  5234. + if (code == 'i')
  5235. + fputs ("hs", stream);
  5236. + else
  5237. + fputs ("lo", stream);
  5238. + break;
  5239. + case GE:
  5240. + if (code == 'i')
  5241. + fputs ("lt", stream);
  5242. + else
  5243. + fputs ("ge", stream);
  5244. + break;
  5245. + case GEU:
  5246. + if (code == 'i')
  5247. + fputs ("lo", stream);
  5248. + else
  5249. + fputs ("hs", stream);
  5250. + break;
  5251. + case LE:
  5252. + if (code == 'i')
  5253. + fputs ("gt", stream);
  5254. + else
  5255. + fputs ("le", stream);
  5256. + break;
  5257. + case LEU:
  5258. + if (code == 'i')
  5259. + fputs ("hi", stream);
  5260. + else
  5261. + fputs ("ls", stream);
  5262. + break;
  5263. + case CONST_INT:
  5264. + {
  5265. + HOST_WIDE_INT value = INTVAL (x);
  5266. +
  5267. + switch (code)
  5268. + {
  5269. + case 'm':
  5270. + if ( HOST_BITS_PER_WIDE_INT > BITS_PER_WORD )
  5271. + {
  5272. + /* A const_int can be used to represent DImode constants. */
  5273. + value >>= BITS_PER_WORD;
  5274. + }
  5275. + /* We might get a const_int immediate for setting a DI register,
  5276. + we then must then return the correct sign extended DI. The most
  5277. + significant word is just a sign extension. */
  5278. + else if (value < 0)
  5279. + value = -1;
  5280. + else
  5281. + value = 0;
  5282. + break;
  5283. + case 'i':
  5284. + value++;
  5285. + break;
  5286. + case 'p':
  5287. + {
  5288. + /* Set to bit position of first bit set in immediate */
  5289. + int i, bitpos = 32;
  5290. + for (i = 0; i < 32; i++)
  5291. + if (value & (1 << i))
  5292. + {
  5293. + bitpos = i;
  5294. + break;
  5295. + }
  5296. + value = bitpos;
  5297. + }
  5298. + break;
  5299. + case 'z':
  5300. + {
  5301. + /* Set to bit position of first bit cleared in immediate */
  5302. + int i, bitpos = 32;
  5303. + for (i = 0; i < 32; i++)
  5304. + if (!(value & (1 << i)))
  5305. + {
  5306. + bitpos = i;
  5307. + break;
  5308. + }
  5309. + value = bitpos;
  5310. + }
  5311. + break;
  5312. + case 'r':
  5313. + {
  5314. + /* Reglist 8 */
  5315. + char op[50];
  5316. + op[0] = '\0';
  5317. +
  5318. + if (value & 0x01)
  5319. + strcpy (op, "r0-r3");
  5320. + if (value & 0x02)
  5321. + strlen (op) ? strcat (op, ", r4-r7") : strcpy (op,"r4-r7");
  5322. + if (value & 0x04)
  5323. + strlen (op) ? strcat (op, ", r8-r9") : strcpy (op,"r8-r9");
  5324. + if (value & 0x08)
  5325. + strlen (op) ? strcat (op, ", r10") : strcpy (op,"r10");
  5326. + if (value & 0x10)
  5327. + strlen (op) ? strcat (op, ", r11") : strcpy (op,"r11");
  5328. + if (value & 0x20)
  5329. + strlen (op) ? strcat (op, ", r12") : strcpy (op,"r12");
  5330. + if (value & 0x40)
  5331. + strlen (op) ? strcat (op, ", lr") : strcpy (op, "lr");
  5332. + if (value & 0x80)
  5333. + strlen (op) ? strcat (op, ", pc") : strcpy (op, "pc");
  5334. +
  5335. + fputs (op, stream);
  5336. + return;
  5337. + }
  5338. + case 's':
  5339. + {
  5340. + /* Reglist 16 */
  5341. + char reglist16_string[100];
  5342. + int i;
  5343. + bool first_reg = true;
  5344. + reglist16_string[0] = '\0';
  5345. +
  5346. + for (i = 0; i < 16; ++i)
  5347. + {
  5348. + if (value & (1 << i))
  5349. + {
  5350. + first_reg == true ? first_reg = false : strcat(reglist16_string,", ");
  5351. + strcat(reglist16_string,reg_names[INTERNAL_REGNUM(i)]);
  5352. + }
  5353. + }
  5354. + fputs (reglist16_string, stream);
  5355. + return;
  5356. + }
  5357. + case 'h':
  5358. + /* Print halfword part of word */
  5359. + fputs (value ? "b" : "t", stream);
  5360. + return;
  5361. + }
  5362. +
  5363. + /* Print Value */
  5364. + fprintf (stream, "%d", value);
  5365. + break;
  5366. + }
  5367. + case CONST_DOUBLE:
  5368. + {
  5369. + HOST_WIDE_INT hi, lo;
  5370. + if (SCALAR_FLOAT_MODE_P (GET_MODE (x)))
  5371. + {
  5372. + HOST_WIDE_INT target_float[2];
  5373. + hi = lo = 0;
  5374. + real_to_target (target_float, CONST_DOUBLE_REAL_VALUE (x),
  5375. + GET_MODE (x));
  5376. + /* For doubles the most significant part starts at index 0. */
  5377. + if (GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
  5378. + {
  5379. + hi = target_float[0];
  5380. + lo = target_float[1];
  5381. + }
  5382. + else
  5383. + {
  5384. + lo = target_float[0];
  5385. + }
  5386. + }
  5387. + else
  5388. + {
  5389. + hi = CONST_DOUBLE_HIGH (x);
  5390. + lo = CONST_DOUBLE_LOW (x);
  5391. + }
  5392. +
  5393. + if (code == 'm')
  5394. + fprintf (stream, "%ld", hi);
  5395. + else
  5396. + fprintf (stream, "%ld", lo);
  5397. +
  5398. + break;
  5399. + }
  5400. + case CONST:
  5401. + output_addr_const (stream, XEXP (XEXP (x, 0), 0));
  5402. + fprintf (stream, "+%ld", INTVAL (XEXP (XEXP (x, 0), 1)));
  5403. + break;
  5404. + case REG:
  5405. + /* Swap register name if the register is DImode or DFmode. */
  5406. + if (GET_MODE (x) == DImode || GET_MODE (x) == DFmode)
  5407. + {
  5408. + /* Double register must have an even numbered address */
  5409. + gcc_assert (!(REGNO (x) % 2));
  5410. + if (code == 'm')
  5411. + fputs (reg_names[true_regnum (x)], stream);
  5412. + else
  5413. + fputs (reg_names[true_regnum (x) + 1], stream);
  5414. + }
  5415. + else if (GET_MODE (x) == TImode)
  5416. + {
  5417. + switch (code)
  5418. + {
  5419. + case 'T':
  5420. + fputs (reg_names[true_regnum (x)], stream);
  5421. + break;
  5422. + case 'U':
  5423. + fputs (reg_names[true_regnum (x) + 1], stream);
  5424. + break;
  5425. + case 'L':
  5426. + fputs (reg_names[true_regnum (x) + 2], stream);
  5427. + break;
  5428. + case 'B':
  5429. + fputs (reg_names[true_regnum (x) + 3], stream);
  5430. + break;
  5431. + default:
  5432. + fprintf (stream, "%s, %s, %s, %s",
  5433. + reg_names[true_regnum (x) + 3],
  5434. + reg_names[true_regnum (x) + 2],
  5435. + reg_names[true_regnum (x) + 1],
  5436. + reg_names[true_regnum (x)]);
  5437. + break;
  5438. + }
  5439. + }
  5440. + else
  5441. + {
  5442. + fputs (reg_names[true_regnum (x)], stream);
  5443. + }
  5444. + break;
  5445. + case CODE_LABEL:
  5446. + case LABEL_REF:
  5447. + case SYMBOL_REF:
  5448. + output_addr_const (stream, x);
  5449. + break;
  5450. + case MEM:
  5451. + switch (GET_CODE (XEXP (x, 0)))
  5452. + {
  5453. + case LABEL_REF:
  5454. + case SYMBOL_REF:
  5455. + output_addr_const (stream, XEXP (x, 0));
  5456. + break;
  5457. + case MEM:
  5458. + switch (GET_CODE (XEXP (XEXP (x, 0), 0)))
  5459. + {
  5460. + case SYMBOL_REF:
  5461. + output_addr_const (stream, XEXP (XEXP (x, 0), 0));
  5462. + break;
  5463. + default:
  5464. + error = 1;
  5465. + break;
  5466. + }
  5467. + break;
  5468. + case REG:
  5469. + avr32_print_operand (stream, XEXP (x, 0), 0);
  5470. + if (code != 'p')
  5471. + fputs ("[0]", stream);
  5472. + break;
  5473. + case PRE_DEC:
  5474. + fputs ("--", stream);
  5475. + avr32_print_operand (stream, XEXP (XEXP (x, 0), 0), 0);
  5476. + break;
  5477. + case POST_INC:
  5478. + avr32_print_operand (stream, XEXP (XEXP (x, 0), 0), 0);
  5479. + fputs ("++", stream);
  5480. + break;
  5481. + case PLUS:
  5482. + {
  5483. + rtx op0 = XEXP (XEXP (x, 0), 0);
  5484. + rtx op1 = XEXP (XEXP (x, 0), 1);
  5485. + rtx base = NULL_RTX, offset = NULL_RTX;
  5486. +
  5487. + if (avr32_address_register_rtx_p (op0, 1))
  5488. + {
  5489. + base = op0;
  5490. + offset = op1;
  5491. + }
  5492. + else if (avr32_address_register_rtx_p (op1, 1))
  5493. + {
  5494. + /* Operands are switched. */
  5495. + base = op1;
  5496. + offset = op0;
  5497. + }
  5498. +
  5499. + gcc_assert (base && offset
  5500. + && avr32_address_register_rtx_p (base, 1)
  5501. + && avr32_legitimate_index_p (GET_MODE (x), offset,
  5502. + 1));
  5503. +
  5504. + avr32_print_operand (stream, base, 0);
  5505. + fputs ("[", stream);
  5506. + avr32_print_operand (stream, offset, 0);
  5507. + fputs ("]", stream);
  5508. + break;
  5509. + }
  5510. + case CONST:
  5511. + output_addr_const (stream, XEXP (XEXP (XEXP (x, 0), 0), 0));
  5512. + fprintf (stream, " + %ld",
  5513. + INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)));
  5514. + break;
  5515. + case CONST_INT:
  5516. + avr32_print_operand (stream, XEXP (x, 0), 0);
  5517. + break;
  5518. + default:
  5519. + error = 1;
  5520. + }
  5521. + break;
  5522. + case MULT:
  5523. + {
  5524. + int value = INTVAL (XEXP (x, 1));
  5525. +
  5526. + /* Convert immediate in multiplication into a shift immediate */
  5527. + switch (value)
  5528. + {
  5529. + case 2:
  5530. + value = 1;
  5531. + break;
  5532. + case 4:
  5533. + value = 2;
  5534. + break;
  5535. + case 8:
  5536. + value = 3;
  5537. + break;
  5538. + default:
  5539. + value = 0;
  5540. + }
  5541. + fprintf (stream, "%s << %i", reg_names[true_regnum (XEXP (x, 0))],
  5542. + value);
  5543. + break;
  5544. + }
  5545. + case ASHIFT:
  5546. + if (GET_CODE (XEXP (x, 1)) == CONST_INT)
  5547. + fprintf (stream, "%s << %i", reg_names[true_regnum (XEXP (x, 0))],
  5548. + (int) INTVAL (XEXP (x, 1)));
  5549. + else if (REG_P (XEXP (x, 1)))
  5550. + fprintf (stream, "%s << %s", reg_names[true_regnum (XEXP (x, 0))],
  5551. + reg_names[true_regnum (XEXP (x, 1))]);
  5552. + else
  5553. + {
  5554. + error = 1;
  5555. + }
  5556. + break;
  5557. + case LSHIFTRT:
  5558. + if (GET_CODE (XEXP (x, 1)) == CONST_INT)
  5559. + fprintf (stream, "%s >> %i", reg_names[true_regnum (XEXP (x, 0))],
  5560. + (int) INTVAL (XEXP (x, 1)));
  5561. + else if (REG_P (XEXP (x, 1)))
  5562. + fprintf (stream, "%s >> %s", reg_names[true_regnum (XEXP (x, 0))],
  5563. + reg_names[true_regnum (XEXP (x, 1))]);
  5564. + else
  5565. + {
  5566. + error = 1;
  5567. + }
  5568. + fprintf (stream, ">>");
  5569. + break;
  5570. + case PARALLEL:
  5571. + {
  5572. + /* Load store multiple */
  5573. + int i;
  5574. + int count = XVECLEN (x, 0);
  5575. + int reglist16 = 0;
  5576. + char reglist16_string[100];
  5577. +
  5578. + for (i = 0; i < count; ++i)
  5579. + {
  5580. + rtx vec_elm = XVECEXP (x, 0, i);
  5581. + if (GET_MODE (vec_elm) != SET)
  5582. + {
  5583. + debug_rtx (vec_elm);
  5584. + internal_error ("Unknown element in parallel expression!");
  5585. + }
  5586. + if (GET_MODE (XEXP (vec_elm, 0)) == REG)
  5587. + {
  5588. + /* Load multiple */
  5589. + reglist16 |= 1 << ASM_REGNUM (REGNO (XEXP (vec_elm, 0)));
  5590. + }
  5591. + else
  5592. + {
  5593. + /* Store multiple */
  5594. + reglist16 |= 1 << ASM_REGNUM (REGNO (XEXP (vec_elm, 1)));
  5595. + }
  5596. + }
  5597. +
  5598. + avr32_make_reglist16 (reglist16, reglist16_string);
  5599. + fputs (reglist16_string, stream);
  5600. +
  5601. + break;
  5602. + }
  5603. +
  5604. + case PLUS:
  5605. + {
  5606. + rtx op0 = XEXP (x, 0);
  5607. + rtx op1 = XEXP (x, 1);
  5608. + rtx base = NULL_RTX, offset = NULL_RTX;
  5609. +
  5610. + if (avr32_address_register_rtx_p (op0, 1))
  5611. + {
  5612. + base = op0;
  5613. + offset = op1;
  5614. + }
  5615. + else if (avr32_address_register_rtx_p (op1, 1))
  5616. + {
  5617. + /* Operands are switched. */
  5618. + base = op1;
  5619. + offset = op0;
  5620. + }
  5621. +
  5622. + gcc_assert (base && offset
  5623. + && avr32_address_register_rtx_p (base, 1)
  5624. + && avr32_legitimate_index_p (GET_MODE (x), offset, 1));
  5625. +
  5626. + avr32_print_operand (stream, base, 0);
  5627. + fputs ("[", stream);
  5628. + avr32_print_operand (stream, offset, 0);
  5629. + fputs ("]", stream);
  5630. + break;
  5631. + }
  5632. +
  5633. + default:
  5634. + error = 1;
  5635. + }
  5636. +
  5637. + if (error)
  5638. + {
  5639. + debug_rtx (x);
  5640. + internal_error ("Illegal expression for avr32_print_operand");
  5641. + }
  5642. +}
  5643. +
  5644. +rtx
  5645. +avr32_get_note_reg_equiv (rtx insn)
  5646. +{
  5647. + rtx note;
  5648. +
  5649. + note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
  5650. +
  5651. + if (note != NULL_RTX)
  5652. + return XEXP (note, 0);
  5653. + else
  5654. + return NULL_RTX;
  5655. +}
  5656. +
  5657. +
  5658. +/*
  5659. + Outputs to stdio stream stream the assembler syntax for an instruction
  5660. + operand that is a memory reference whose address is x. x is an RTL
  5661. + expression.
  5662. +
  5663. + ToDo: fixme.
  5664. +*/
  5665. +void
  5666. +avr32_print_operand_address (FILE * stream, rtx x)
  5667. +{
  5668. + fprintf (stream, "(%d) /* address */", REGNO (x));
  5669. +}
  5670. +
  5671. +
  5672. +/* Return true if _GLOBAL_OFFSET_TABLE_ symbol is mentioned. */
  5673. +bool
  5674. +avr32_got_mentioned_p (rtx addr)
  5675. +{
  5676. + if (GET_CODE (addr) == MEM)
  5677. + addr = XEXP (addr, 0);
  5678. + while (GET_CODE (addr) == CONST)
  5679. + addr = XEXP (addr, 0);
  5680. + if (GET_CODE (addr) == SYMBOL_REF)
  5681. + {
  5682. + return streq (XSTR (addr, 0), "_GLOBAL_OFFSET_TABLE_");
  5683. + }
  5684. + if (GET_CODE (addr) == PLUS || GET_CODE (addr) == MINUS)
  5685. + {
  5686. + bool l1, l2;
  5687. +
  5688. + l1 = avr32_got_mentioned_p (XEXP (addr, 0));
  5689. + l2 = avr32_got_mentioned_p (XEXP (addr, 1));
  5690. + return l1 || l2;
  5691. + }
  5692. + return false;
  5693. +}
  5694. +
  5695. +
  5696. +/* Find the symbol in an address expression. */
  5697. +rtx
  5698. +avr32_find_symbol (rtx addr)
  5699. +{
  5700. + if (GET_CODE (addr) == MEM)
  5701. + addr = XEXP (addr, 0);
  5702. +
  5703. + while (GET_CODE (addr) == CONST)
  5704. + addr = XEXP (addr, 0);
  5705. +
  5706. + if (GET_CODE (addr) == SYMBOL_REF || GET_CODE (addr) == LABEL_REF)
  5707. + return addr;
  5708. + if (GET_CODE (addr) == PLUS)
  5709. + {
  5710. + rtx l1, l2;
  5711. +
  5712. + l1 = avr32_find_symbol (XEXP (addr, 0));
  5713. + l2 = avr32_find_symbol (XEXP (addr, 1));
  5714. + if (l1 != NULL_RTX && l2 == NULL_RTX)
  5715. + return l1;
  5716. + else if (l1 == NULL_RTX && l2 != NULL_RTX)
  5717. + return l2;
  5718. + }
  5719. +
  5720. + return NULL_RTX;
  5721. +}
  5722. +
  5723. +
  5724. +/* Routines for manipulation of the constant pool. */
  5725. +
  5726. +/* AVR32 instructions cannot load a large constant directly into a
  5727. + register; they have to come from a pc relative load. The constant
  5728. + must therefore be placed in the addressable range of the pc
  5729. + relative load. Depending on the precise pc relative load
  5730. + instruction the range is somewhere between 256 bytes and 4k. This
  5731. + means that we often have to dump a constant inside a function, and
  5732. + generate code to branch around it.
  5733. +
  5734. + It is important to minimize this, since the branches will slow
  5735. + things down and make the code larger.
  5736. +
  5737. + Normally we can hide the table after an existing unconditional
  5738. + branch so that there is no interruption of the flow, but in the
  5739. + worst case the code looks like this:
  5740. +
  5741. + lddpc rn, L1
  5742. + ...
  5743. + rjmp L2
  5744. + align
  5745. + L1: .long value
  5746. + L2:
  5747. + ...
  5748. +
  5749. + lddpc rn, L3
  5750. + ...
  5751. + rjmp L4
  5752. + align
  5753. + L3: .long value
  5754. + L4:
  5755. + ...
  5756. +
  5757. + We fix this by performing a scan after scheduling, which notices
  5758. + which instructions need to have their operands fetched from the
  5759. + constant table and builds the table.
  5760. +
  5761. + The algorithm starts by building a table of all the constants that
  5762. + need fixing up and all the natural barriers in the function (places
  5763. + where a constant table can be dropped without breaking the flow).
  5764. + For each fixup we note how far the pc-relative replacement will be
  5765. + able to reach and the offset of the instruction into the function.
  5766. +
  5767. + Having built the table we then group the fixes together to form
  5768. + tables that are as large as possible (subject to addressing
  5769. + constraints) and emit each table of constants after the last
  5770. + barrier that is within range of all the instructions in the group.
  5771. + If a group does not contain a barrier, then we forcibly create one
  5772. + by inserting a jump instruction into the flow. Once the table has
  5773. + been inserted, the insns are then modified to reference the
  5774. + relevant entry in the pool.
  5775. +
  5776. + Possible enhancements to the algorithm (not implemented) are:
  5777. +
  5778. + 1) For some processors and object formats, there may be benefit in
  5779. + aligning the pools to the start of cache lines; this alignment
  5780. + would need to be taken into account when calculating addressability
  5781. + of a pool. */
  5782. +
  5783. +/* These typedefs are located at the start of this file, so that
  5784. + they can be used in the prototypes there. This comment is to
  5785. + remind readers of that fact so that the following structures
  5786. + can be understood more easily.
  5787. +
  5788. + typedef struct minipool_node Mnode;
  5789. + typedef struct minipool_fixup Mfix; */
  5790. +
  5791. +struct minipool_node
  5792. +{
  5793. + /* Doubly linked chain of entries. */
  5794. + Mnode *next;
  5795. + Mnode *prev;
  5796. + /* The maximum offset into the code that this entry can be placed. While
  5797. + pushing fixes for forward references, all entries are sorted in order of
  5798. + increasing max_address. */
  5799. + HOST_WIDE_INT max_address;
  5800. + /* Similarly for an entry inserted for a backwards ref. */
  5801. + HOST_WIDE_INT min_address;
  5802. + /* The number of fixes referencing this entry. This can become zero if we
  5803. + "unpush" an entry. In this case we ignore the entry when we come to
  5804. + emit the code. */
  5805. + int refcount;
  5806. + /* The offset from the start of the minipool. */
  5807. + HOST_WIDE_INT offset;
  5808. + /* The value in table. */
  5809. + rtx value;
  5810. + /* The mode of value. */
  5811. + enum machine_mode mode;
  5812. + /* The size of the value. */
  5813. + int fix_size;
  5814. +};
  5815. +
  5816. +
  5817. +struct minipool_fixup
  5818. +{
  5819. + Mfix *next;
  5820. + rtx insn;
  5821. + HOST_WIDE_INT address;
  5822. + rtx *loc;
  5823. + enum machine_mode mode;
  5824. + int fix_size;
  5825. + rtx value;
  5826. + Mnode *minipool;
  5827. + HOST_WIDE_INT forwards;
  5828. + HOST_WIDE_INT backwards;
  5829. +};
  5830. +
  5831. +
  5832. +/* Fixes less than a word need padding out to a word boundary. */
  5833. +#define MINIPOOL_FIX_SIZE(mode, value) \
  5834. + (IS_FORCE_MINIPOOL(value) ? 0 : \
  5835. + (GET_MODE_SIZE ((mode)) >= 4 ? GET_MODE_SIZE ((mode)) : 4))
  5836. +
  5837. +#define IS_FORCE_MINIPOOL(x) \
  5838. + (GET_CODE(x) == UNSPEC && \
  5839. + XINT(x, 1) == UNSPEC_FORCE_MINIPOOL)
  5840. +
  5841. +static Mnode *minipool_vector_head;
  5842. +static Mnode *minipool_vector_tail;
  5843. +
  5844. +/* The linked list of all minipool fixes required for this function. */
  5845. +Mfix *minipool_fix_head;
  5846. +Mfix *minipool_fix_tail;
  5847. +/* The fix entry for the current minipool, once it has been placed. */
  5848. +Mfix *minipool_barrier;
  5849. +
  5850. +
  5851. +/* Determines if INSN is the start of a jump table. Returns the end
  5852. + of the TABLE or NULL_RTX. */
  5853. +static rtx
  5854. +is_jump_table (rtx insn)
  5855. +{
  5856. + rtx table;
  5857. +
  5858. + if (GET_CODE (insn) == JUMP_INSN
  5859. + && JUMP_LABEL (insn) != NULL
  5860. + && ((table = next_real_insn (JUMP_LABEL (insn)))
  5861. + == next_real_insn (insn))
  5862. + && table != NULL
  5863. + && GET_CODE (table) == JUMP_INSN
  5864. + && (GET_CODE (PATTERN (table)) == ADDR_VEC
  5865. + || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
  5866. + return table;
  5867. +
  5868. + return NULL_RTX;
  5869. +}
  5870. +
  5871. +
  5872. +static HOST_WIDE_INT
  5873. +get_jump_table_size (rtx insn)
  5874. +{
  5875. + /* ADDR_VECs only take room if read-only data goes into the text section. */
  5876. + if (JUMP_TABLES_IN_TEXT_SECTION
  5877. +#if !defined(READONLY_DATA_SECTION_ASM_OP)
  5878. + || 1
  5879. +#endif
  5880. + )
  5881. + {
  5882. + rtx body = PATTERN (insn);
  5883. + int elt = GET_CODE (body) == ADDR_DIFF_VEC ? 1 : 0;
  5884. +
  5885. + return GET_MODE_SIZE (GET_MODE (body)) * XVECLEN (body, elt);
  5886. + }
  5887. +
  5888. + return 0;
  5889. +}
  5890. +
  5891. +
  5892. +/* Move a minipool fix MP from its current location to before MAX_MP.
  5893. + If MAX_MP is NULL, then MP doesn't need moving, but the addressing
  5894. + constraints may need updating. */
  5895. +static Mnode *
  5896. +move_minipool_fix_forward_ref (Mnode * mp, Mnode * max_mp,
  5897. + HOST_WIDE_INT max_address)
  5898. +{
  5899. + /* This should never be true and the code below assumes these are
  5900. + different. */
  5901. + if (mp == max_mp)
  5902. + abort ();
  5903. +
  5904. + if (max_mp == NULL)
  5905. + {
  5906. + if (max_address < mp->max_address)
  5907. + mp->max_address = max_address;
  5908. + }
  5909. + else
  5910. + {
  5911. + if (max_address > max_mp->max_address - mp->fix_size)
  5912. + mp->max_address = max_mp->max_address - mp->fix_size;
  5913. + else
  5914. + mp->max_address = max_address;
  5915. +
  5916. + /* Unlink MP from its current position. Since max_mp is non-null,
  5917. + mp->prev must be non-null. */
  5918. + mp->prev->next = mp->next;
  5919. + if (mp->next != NULL)
  5920. + mp->next->prev = mp->prev;
  5921. + else
  5922. + minipool_vector_tail = mp->prev;
  5923. +
  5924. + /* Re-insert it before MAX_MP. */
  5925. + mp->next = max_mp;
  5926. + mp->prev = max_mp->prev;
  5927. + max_mp->prev = mp;
  5928. +
  5929. + if (mp->prev != NULL)
  5930. + mp->prev->next = mp;
  5931. + else
  5932. + minipool_vector_head = mp;
  5933. + }
  5934. +
  5935. + /* Save the new entry. */
  5936. + max_mp = mp;
  5937. +
  5938. + /* Scan over the preceding entries and adjust their addresses as required.
  5939. + */
  5940. + while (mp->prev != NULL
  5941. + && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
  5942. + {
  5943. + mp->prev->max_address = mp->max_address - mp->prev->fix_size;
  5944. + mp = mp->prev;
  5945. + }
  5946. +
  5947. + return max_mp;
  5948. +}
  5949. +
  5950. +
  5951. +/* Add a constant to the minipool for a forward reference. Returns the
  5952. + node added or NULL if the constant will not fit in this pool. */
  5953. +static Mnode *
  5954. +add_minipool_forward_ref (Mfix * fix)
  5955. +{
  5956. + /* If set, max_mp is the first pool_entry that has a lower constraint than
  5957. + the one we are trying to add. */
  5958. + Mnode *max_mp = NULL;
  5959. + HOST_WIDE_INT max_address = fix->address + fix->forwards;
  5960. + Mnode *mp;
  5961. +
  5962. + /* If this fix's address is greater than the address of the first entry,
  5963. + then we can't put the fix in this pool. We subtract the size of the
  5964. + current fix to ensure that if the table is fully packed we still have
  5965. + enough room to insert this value by shuffling the other fixes forwards. */
  5966. + if (minipool_vector_head &&
  5967. + fix->address >= minipool_vector_head->max_address - fix->fix_size)
  5968. + return NULL;
  5969. +
  5970. + /* Scan the pool to see if a constant with the same value has already been
  5971. + added. While we are doing this, also note the location where we must
  5972. + insert the constant if it doesn't already exist. */
  5973. + for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
  5974. + {
  5975. + if (GET_CODE (fix->value) == GET_CODE (mp->value)
  5976. + && fix->mode == mp->mode
  5977. + && (GET_CODE (fix->value) != CODE_LABEL
  5978. + || (CODE_LABEL_NUMBER (fix->value)
  5979. + == CODE_LABEL_NUMBER (mp->value)))
  5980. + && rtx_equal_p (fix->value, mp->value))
  5981. + {
  5982. + /* More than one fix references this entry. */
  5983. + mp->refcount++;
  5984. + return move_minipool_fix_forward_ref (mp, max_mp, max_address);
  5985. + }
  5986. +
  5987. + /* Note the insertion point if necessary. */
  5988. + if (max_mp == NULL && mp->max_address > max_address)
  5989. + max_mp = mp;
  5990. +
  5991. + }
  5992. +
  5993. + /* The value is not currently in the minipool, so we need to create a new
  5994. + entry for it. If MAX_MP is NULL, the entry will be put on the end of
  5995. + the list since the placement is less constrained than any existing
  5996. + entry. Otherwise, we insert the new fix before MAX_MP and, if
  5997. + necessary, adjust the constraints on the other entries. */
  5998. + mp = xmalloc (sizeof (*mp));
  5999. + mp->fix_size = fix->fix_size;
  6000. + mp->mode = fix->mode;
  6001. + mp->value = fix->value;
  6002. + mp->refcount = 1;
  6003. + /* Not yet required for a backwards ref. */
  6004. + mp->min_address = -65536;
  6005. +
  6006. + if (max_mp == NULL)
  6007. + {
  6008. + mp->max_address = max_address;
  6009. + mp->next = NULL;
  6010. + mp->prev = minipool_vector_tail;
  6011. +
  6012. + if (mp->prev == NULL)
  6013. + {
  6014. + minipool_vector_head = mp;
  6015. + minipool_vector_label = gen_label_rtx ();
  6016. + }
  6017. + else
  6018. + mp->prev->next = mp;
  6019. +
  6020. + minipool_vector_tail = mp;
  6021. + }
  6022. + else
  6023. + {
  6024. + if (max_address > max_mp->max_address - mp->fix_size)
  6025. + mp->max_address = max_mp->max_address - mp->fix_size;
  6026. + else
  6027. + mp->max_address = max_address;
  6028. +
  6029. + mp->next = max_mp;
  6030. + mp->prev = max_mp->prev;
  6031. + max_mp->prev = mp;
  6032. + if (mp->prev != NULL)
  6033. + mp->prev->next = mp;
  6034. + else
  6035. + minipool_vector_head = mp;
  6036. + }
  6037. +
  6038. + /* Save the new entry. */
  6039. + max_mp = mp;
  6040. +
  6041. + /* Scan over the preceding entries and adjust their addresses as required.
  6042. + */
  6043. + while (mp->prev != NULL
  6044. + && mp->prev->max_address > mp->max_address - mp->prev->fix_size)
  6045. + {
  6046. + mp->prev->max_address = mp->max_address - mp->prev->fix_size;
  6047. + mp = mp->prev;
  6048. + }
  6049. +
  6050. + return max_mp;
  6051. +}
  6052. +
  6053. +
  6054. +static Mnode *
  6055. +move_minipool_fix_backward_ref (Mnode * mp, Mnode * min_mp,
  6056. + HOST_WIDE_INT min_address)
  6057. +{
  6058. + HOST_WIDE_INT offset;
  6059. +
  6060. + /* This should never be true, and the code below assumes these are
  6061. + different. */
  6062. + if (mp == min_mp)
  6063. + abort ();
  6064. +
  6065. + if (min_mp == NULL)
  6066. + {
  6067. + if (min_address > mp->min_address)
  6068. + mp->min_address = min_address;
  6069. + }
  6070. + else
  6071. + {
  6072. + /* We will adjust this below if it is too loose. */
  6073. + mp->min_address = min_address;
  6074. +
  6075. + /* Unlink MP from its current position. Since min_mp is non-null,
  6076. + mp->next must be non-null. */
  6077. + mp->next->prev = mp->prev;
  6078. + if (mp->prev != NULL)
  6079. + mp->prev->next = mp->next;
  6080. + else
  6081. + minipool_vector_head = mp->next;
  6082. +
  6083. + /* Reinsert it after MIN_MP. */
  6084. + mp->prev = min_mp;
  6085. + mp->next = min_mp->next;
  6086. + min_mp->next = mp;
  6087. + if (mp->next != NULL)
  6088. + mp->next->prev = mp;
  6089. + else
  6090. + minipool_vector_tail = mp;
  6091. + }
  6092. +
  6093. + min_mp = mp;
  6094. +
  6095. + offset = 0;
  6096. + for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
  6097. + {
  6098. + mp->offset = offset;
  6099. + if (mp->refcount > 0)
  6100. + offset += mp->fix_size;
  6101. +
  6102. + if (mp->next && mp->next->min_address < mp->min_address + mp->fix_size)
  6103. + mp->next->min_address = mp->min_address + mp->fix_size;
  6104. + }
  6105. +
  6106. + return min_mp;
  6107. +}
  6108. +
  6109. +
  6110. +/* Add a constant to the minipool for a backward reference. Returns the
  6111. + node added or NULL if the constant will not fit in this pool.
  6112. +
  6113. + Note that the code for insertion for a backwards reference can be
  6114. + somewhat confusing because the calculated offsets for each fix do
  6115. + not take into account the size of the pool (which is still under
  6116. + construction). */
  6117. +static Mnode *
  6118. +add_minipool_backward_ref (Mfix * fix)
  6119. +{
  6120. + /* If set, min_mp is the last pool_entry that has a lower constraint than
  6121. + the one we are trying to add. */
  6122. + Mnode *min_mp = NULL;
  6123. + /* This can be negative, since it is only a constraint. */
  6124. + HOST_WIDE_INT min_address = fix->address - fix->backwards;
  6125. + Mnode *mp;
  6126. +
  6127. + /* If we can't reach the current pool from this insn, or if we can't insert
  6128. + this entry at the end of the pool without pushing other fixes out of
  6129. + range, then we don't try. This ensures that we can't fail later on. */
  6130. + if (min_address >= minipool_barrier->address
  6131. + || (minipool_vector_tail->min_address + fix->fix_size
  6132. + >= minipool_barrier->address))
  6133. + return NULL;
  6134. +
  6135. + /* Scan the pool to see if a constant with the same value has already been
  6136. + added. While we are doing this, also note the location where we must
  6137. + insert the constant if it doesn't already exist. */
  6138. + for (mp = minipool_vector_tail; mp != NULL; mp = mp->prev)
  6139. + {
  6140. + if (GET_CODE (fix->value) == GET_CODE (mp->value)
  6141. + && fix->mode == mp->mode
  6142. + && (GET_CODE (fix->value) != CODE_LABEL
  6143. + || (CODE_LABEL_NUMBER (fix->value)
  6144. + == CODE_LABEL_NUMBER (mp->value)))
  6145. + && rtx_equal_p (fix->value, mp->value)
  6146. + /* Check that there is enough slack to move this entry to the end
  6147. + of the table (this is conservative). */
  6148. + && (mp->max_address
  6149. + > (minipool_barrier->address
  6150. + + minipool_vector_tail->offset
  6151. + + minipool_vector_tail->fix_size)))
  6152. + {
  6153. + mp->refcount++;
  6154. + return move_minipool_fix_backward_ref (mp, min_mp, min_address);
  6155. + }
  6156. +
  6157. + if (min_mp != NULL)
  6158. + mp->min_address += fix->fix_size;
  6159. + else
  6160. + {
  6161. + /* Note the insertion point if necessary. */
  6162. + if (mp->min_address < min_address)
  6163. + {
  6164. + min_mp = mp;
  6165. + }
  6166. + else if (mp->max_address
  6167. + < minipool_barrier->address + mp->offset + fix->fix_size)
  6168. + {
  6169. + /* Inserting before this entry would push the fix beyond its
  6170. + maximum address (which can happen if we have re-located a
  6171. + forwards fix); force the new fix to come after it. */
  6172. + min_mp = mp;
  6173. + min_address = mp->min_address + fix->fix_size;
  6174. + }
  6175. + }
  6176. + }
  6177. +
  6178. + /* We need to create a new entry. */
  6179. + mp = xmalloc (sizeof (*mp));
  6180. + mp->fix_size = fix->fix_size;
  6181. + mp->mode = fix->mode;
  6182. + mp->value = fix->value;
  6183. + mp->refcount = 1;
  6184. + mp->max_address = minipool_barrier->address + 65536;
  6185. +
  6186. + mp->min_address = min_address;
  6187. +
  6188. + if (min_mp == NULL)
  6189. + {
  6190. + mp->prev = NULL;
  6191. + mp->next = minipool_vector_head;
  6192. +
  6193. + if (mp->next == NULL)
  6194. + {
  6195. + minipool_vector_tail = mp;
  6196. + minipool_vector_label = gen_label_rtx ();
  6197. + }
  6198. + else
  6199. + mp->next->prev = mp;
  6200. +
  6201. + minipool_vector_head = mp;
  6202. + }
  6203. + else
  6204. + {
  6205. + mp->next = min_mp->next;
  6206. + mp->prev = min_mp;
  6207. + min_mp->next = mp;
  6208. +
  6209. + if (mp->next != NULL)
  6210. + mp->next->prev = mp;
  6211. + else
  6212. + minipool_vector_tail = mp;
  6213. + }
  6214. +
  6215. + /* Save the new entry. */
  6216. + min_mp = mp;
  6217. +
  6218. + if (mp->prev)
  6219. + mp = mp->prev;
  6220. + else
  6221. + mp->offset = 0;
  6222. +
  6223. + /* Scan over the following entries and adjust their offsets. */
  6224. + while (mp->next != NULL)
  6225. + {
  6226. + if (mp->next->min_address < mp->min_address + mp->fix_size)
  6227. + mp->next->min_address = mp->min_address + mp->fix_size;
  6228. +
  6229. + if (mp->refcount)
  6230. + mp->next->offset = mp->offset + mp->fix_size;
  6231. + else
  6232. + mp->next->offset = mp->offset;
  6233. +
  6234. + mp = mp->next;
  6235. + }
  6236. +
  6237. + return min_mp;
  6238. +}
  6239. +
  6240. +
  6241. +static void
  6242. +assign_minipool_offsets (Mfix * barrier)
  6243. +{
  6244. + HOST_WIDE_INT offset = 0;
  6245. + Mnode *mp;
  6246. +
  6247. + minipool_barrier = barrier;
  6248. +
  6249. + for (mp = minipool_vector_head; mp != NULL; mp = mp->next)
  6250. + {
  6251. + mp->offset = offset;
  6252. +
  6253. + if (mp->refcount > 0)
  6254. + offset += mp->fix_size;
  6255. + }
  6256. +}
  6257. +
  6258. +
  6259. +/* Print a symbolic form of X to the debug file, F. */
  6260. +static void
  6261. +avr32_print_value (FILE * f, rtx x)
  6262. +{
  6263. + switch (GET_CODE (x))
  6264. + {
  6265. + case CONST_INT:
  6266. + fprintf (f, "0x%x", (int) INTVAL (x));
  6267. + return;
  6268. +
  6269. + case CONST_DOUBLE:
  6270. + fprintf (f, "<0x%lx,0x%lx>", (long) XWINT (x, 2), (long) XWINT (x, 3));
  6271. + return;
  6272. +
  6273. + case CONST_VECTOR:
  6274. + {
  6275. + int i;
  6276. +
  6277. + fprintf (f, "<");
  6278. + for (i = 0; i < CONST_VECTOR_NUNITS (x); i++)
  6279. + {
  6280. + fprintf (f, "0x%x", (int) INTVAL (CONST_VECTOR_ELT (x, i)));
  6281. + if (i < (CONST_VECTOR_NUNITS (x) - 1))
  6282. + fputc (',', f);
  6283. + }
  6284. + fprintf (f, ">");
  6285. + }
  6286. + return;
  6287. +
  6288. + case CONST_STRING:
  6289. + fprintf (f, "\"%s\"", XSTR (x, 0));
  6290. + return;
  6291. +
  6292. + case SYMBOL_REF:
  6293. + fprintf (f, "`%s'", XSTR (x, 0));
  6294. + return;
  6295. +
  6296. + case LABEL_REF:
  6297. + fprintf (f, "L%d", INSN_UID (XEXP (x, 0)));
  6298. + return;
  6299. +
  6300. + case CONST:
  6301. + avr32_print_value (f, XEXP (x, 0));
  6302. + return;
  6303. +
  6304. + case PLUS:
  6305. + avr32_print_value (f, XEXP (x, 0));
  6306. + fprintf (f, "+");
  6307. + avr32_print_value (f, XEXP (x, 1));
  6308. + return;
  6309. +
  6310. + case PC:
  6311. + fprintf (f, "pc");
  6312. + return;
  6313. +
  6314. + default:
  6315. + fprintf (f, "????");
  6316. + return;
  6317. + }
  6318. +}
  6319. +
  6320. +
  6321. +int
  6322. +is_minipool_label (rtx label)
  6323. +{
  6324. + minipool_labels *cur_mp_label = cfun->machine->minipool_label_head;
  6325. +
  6326. + if (GET_CODE (label) != CODE_LABEL)
  6327. + return FALSE;
  6328. +
  6329. + while (cur_mp_label)
  6330. + {
  6331. + if (CODE_LABEL_NUMBER (label)
  6332. + == CODE_LABEL_NUMBER (cur_mp_label->label))
  6333. + return TRUE;
  6334. + cur_mp_label = cur_mp_label->next;
  6335. + }
  6336. + return FALSE;
  6337. +}
  6338. +
  6339. +
  6340. +static void
  6341. +new_minipool_label (rtx label)
  6342. +{
  6343. + if (!cfun->machine->minipool_label_head)
  6344. + {
  6345. + cfun->machine->minipool_label_head =
  6346. + ggc_alloc (sizeof (minipool_labels));
  6347. + cfun->machine->minipool_label_tail = cfun->machine->minipool_label_head;
  6348. + cfun->machine->minipool_label_head->label = label;
  6349. + cfun->machine->minipool_label_head->next = 0;
  6350. + cfun->machine->minipool_label_head->prev = 0;
  6351. + }
  6352. + else
  6353. + {
  6354. + cfun->machine->minipool_label_tail->next =
  6355. + ggc_alloc (sizeof (minipool_labels));
  6356. + cfun->machine->minipool_label_tail->next->label = label;
  6357. + cfun->machine->minipool_label_tail->next->next = 0;
  6358. + cfun->machine->minipool_label_tail->next->prev =
  6359. + cfun->machine->minipool_label_tail;
  6360. + cfun->machine->minipool_label_tail =
  6361. + cfun->machine->minipool_label_tail->next;
  6362. + }
  6363. +}
  6364. +
  6365. +
  6366. +/* Output the literal table */
  6367. +static void
  6368. +dump_minipool (rtx scan)
  6369. +{
  6370. + Mnode *mp;
  6371. + Mnode *nmp;
  6372. +
  6373. + if (dump_file)
  6374. + fprintf (dump_file,
  6375. + ";; Emitting minipool after insn %u; address %ld; align %d (bytes)\n",
  6376. + INSN_UID (scan), (unsigned long) minipool_barrier->address, 4);
  6377. +
  6378. + scan = emit_insn_after (gen_consttable_start (), scan);
  6379. + scan = emit_insn_after (gen_align_4 (), scan);
  6380. + scan = emit_label_after (minipool_vector_label, scan);
  6381. + new_minipool_label (minipool_vector_label);
  6382. +
  6383. + for (mp = minipool_vector_head; mp != NULL; mp = nmp)
  6384. + {
  6385. + if (mp->refcount > 0)
  6386. + {
  6387. + if (dump_file)
  6388. + {
  6389. + fprintf (dump_file,
  6390. + ";; Offset %u, min %ld, max %ld ",
  6391. + (unsigned) mp->offset, (unsigned long) mp->min_address,
  6392. + (unsigned long) mp->max_address);
  6393. + avr32_print_value (dump_file, mp->value);
  6394. + fputc ('\n', dump_file);
  6395. + }
  6396. +
  6397. + switch (mp->fix_size)
  6398. + {
  6399. +#ifdef HAVE_consttable_4
  6400. + case 4:
  6401. + scan = emit_insn_after (gen_consttable_4 (mp->value), scan);
  6402. + break;
  6403. +
  6404. +#endif
  6405. +#ifdef HAVE_consttable_8
  6406. + case 8:
  6407. + scan = emit_insn_after (gen_consttable_8 (mp->value), scan);
  6408. + break;
  6409. +
  6410. +#endif
  6411. +#ifdef HAVE_consttable_16
  6412. + case 16:
  6413. + scan = emit_insn_after (gen_consttable_16 (mp->value), scan);
  6414. + break;
  6415. +
  6416. +#endif
  6417. + case 0:
  6418. + /* This can happen for force-minipool entries which just are
  6419. + there to force the minipool to be generated. */
  6420. + break;
  6421. + default:
  6422. + abort ();
  6423. + break;
  6424. + }
  6425. + }
  6426. +
  6427. + nmp = mp->next;
  6428. + free (mp);
  6429. + }
  6430. +
  6431. + minipool_vector_head = minipool_vector_tail = NULL;
  6432. + scan = emit_insn_after (gen_consttable_end (), scan);
  6433. + scan = emit_barrier_after (scan);
  6434. +}
  6435. +
  6436. +
  6437. +/* Return the cost of forcibly inserting a barrier after INSN. */
  6438. +static int
  6439. +avr32_barrier_cost (rtx insn)
  6440. +{
  6441. + /* Basing the location of the pool on the loop depth is preferable, but at
  6442. + the moment, the basic block information seems to be corrupt by this
  6443. + stage of the compilation. */
  6444. + int base_cost = 50;
  6445. + rtx next = next_nonnote_insn (insn);
  6446. +
  6447. + if (next != NULL && GET_CODE (next) == CODE_LABEL)
  6448. + base_cost -= 20;
  6449. +
  6450. + switch (GET_CODE (insn))
  6451. + {
  6452. + case CODE_LABEL:
  6453. + /* It will always be better to place the table before the label, rather
  6454. + than after it. */
  6455. + return 50;
  6456. +
  6457. + case INSN:
  6458. + case CALL_INSN:
  6459. + return base_cost;
  6460. +
  6461. + case JUMP_INSN:
  6462. + return base_cost - 10;
  6463. +
  6464. + default:
  6465. + return base_cost + 10;
  6466. + }
  6467. +}
  6468. +
  6469. +
  6470. +/* Find the best place in the insn stream in the range
  6471. + (FIX->address,MAX_ADDRESS) to forcibly insert a minipool barrier.
  6472. + Create the barrier by inserting a jump and add a new fix entry for
  6473. + it. */
  6474. +static Mfix *
  6475. +create_fix_barrier (Mfix * fix, HOST_WIDE_INT max_address)
  6476. +{
  6477. + HOST_WIDE_INT count = 0;
  6478. + rtx barrier;
  6479. + rtx from = fix->insn;
  6480. + rtx selected = from;
  6481. + int selected_cost;
  6482. + HOST_WIDE_INT selected_address;
  6483. + Mfix *new_fix;
  6484. + HOST_WIDE_INT max_count = max_address - fix->address;
  6485. + rtx label = gen_label_rtx ();
  6486. +
  6487. + selected_cost = avr32_barrier_cost (from);
  6488. + selected_address = fix->address;
  6489. +
  6490. + while (from && count < max_count)
  6491. + {
  6492. + rtx tmp;
  6493. + int new_cost;
  6494. +
  6495. + /* This code shouldn't have been called if there was a natural barrier
  6496. + within range. */
  6497. + if (GET_CODE (from) == BARRIER)
  6498. + abort ();
  6499. +
  6500. + /* Count the length of this insn. */
  6501. + count += get_attr_length (from);
  6502. +
  6503. + /* If there is a jump table, add its length. */
  6504. + tmp = is_jump_table (from);
  6505. + if (tmp != NULL)
  6506. + {
  6507. + count += get_jump_table_size (tmp);
  6508. +
  6509. + /* Jump tables aren't in a basic block, so base the cost on the
  6510. + dispatch insn. If we select this location, we will still put
  6511. + the pool after the table. */
  6512. + new_cost = avr32_barrier_cost (from);
  6513. +
  6514. + if (count < max_count && new_cost <= selected_cost)
  6515. + {
  6516. + selected = tmp;
  6517. + selected_cost = new_cost;
  6518. + selected_address = fix->address + count;
  6519. + }
  6520. +
  6521. + /* Continue after the dispatch table. */
  6522. + from = NEXT_INSN (tmp);
  6523. + continue;
  6524. + }
  6525. +
  6526. + new_cost = avr32_barrier_cost (from);
  6527. +
  6528. + if (count < max_count && new_cost <= selected_cost)
  6529. + {
  6530. + selected = from;
  6531. + selected_cost = new_cost;
  6532. + selected_address = fix->address + count;
  6533. + }
  6534. +
  6535. + from = NEXT_INSN (from);
  6536. + }
  6537. +
  6538. + /* Create a new JUMP_INSN that branches around a barrier. */
  6539. + from = emit_jump_insn_after (gen_jump (label), selected);
  6540. + JUMP_LABEL (from) = label;
  6541. + barrier = emit_barrier_after (from);
  6542. + emit_label_after (label, barrier);
  6543. +
  6544. + /* Create a minipool barrier entry for the new barrier. */
  6545. + new_fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (*new_fix));
  6546. + new_fix->insn = barrier;
  6547. + new_fix->address = selected_address;
  6548. + new_fix->next = fix->next;
  6549. + fix->next = new_fix;
  6550. +
  6551. + return new_fix;
  6552. +}
  6553. +
  6554. +
  6555. +/* Record that there is a natural barrier in the insn stream at
  6556. + ADDRESS. */
  6557. +static void
  6558. +push_minipool_barrier (rtx insn, HOST_WIDE_INT address)
  6559. +{
  6560. + Mfix *fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (*fix));
  6561. +
  6562. + fix->insn = insn;
  6563. + fix->address = address;
  6564. +
  6565. + fix->next = NULL;
  6566. + if (minipool_fix_head != NULL)
  6567. + minipool_fix_tail->next = fix;
  6568. + else
  6569. + minipool_fix_head = fix;
  6570. +
  6571. + minipool_fix_tail = fix;
  6572. +}
  6573. +
  6574. +
  6575. +/* Record INSN, which will need fixing up to load a value from the
  6576. + minipool. ADDRESS is the offset of the insn since the start of the
  6577. + function; LOC is a pointer to the part of the insn which requires
  6578. + fixing; VALUE is the constant that must be loaded, which is of type
  6579. + MODE. */
  6580. +static void
  6581. +push_minipool_fix (rtx insn, HOST_WIDE_INT address, rtx * loc,
  6582. + enum machine_mode mode, rtx value)
  6583. +{
  6584. + Mfix *fix = (Mfix *) obstack_alloc (&minipool_obstack, sizeof (*fix));
  6585. + rtx body = PATTERN (insn);
  6586. +
  6587. + fix->insn = insn;
  6588. + fix->address = address;
  6589. + fix->loc = loc;
  6590. + fix->mode = mode;
  6591. + fix->fix_size = MINIPOOL_FIX_SIZE (mode, value);
  6592. + fix->value = value;
  6593. +
  6594. + if (GET_CODE (body) == PARALLEL)
  6595. + {
  6596. + /* Mcall : Ks16 << 2 */
  6597. + fix->forwards = ((1 << 15) - 1) << 2;
  6598. + fix->backwards = (1 << 15) << 2;
  6599. + }
  6600. + else if (GET_CODE (body) == SET
  6601. + && GET_MODE_SIZE (GET_MODE (SET_DEST (body))) == 4)
  6602. + {
  6603. + if (optimize_size)
  6604. + {
  6605. + /* Lddpc : Ku7 << 2 */
  6606. + fix->forwards = ((1 << 7) - 1) << 2;
  6607. + fix->backwards = 0;
  6608. + }
  6609. + else
  6610. + {
  6611. + /* Ld.w : Ks16 */
  6612. + fix->forwards = ((1 << 15) - 4);
  6613. + fix->backwards = (1 << 15);
  6614. + }
  6615. + }
  6616. + else if (GET_CODE (body) == SET
  6617. + && GET_MODE_SIZE (GET_MODE (SET_DEST (body))) == 8)
  6618. + {
  6619. + /* Ld.d : Ks16 */
  6620. + fix->forwards = ((1 << 15) - 4);
  6621. + fix->backwards = (1 << 15);
  6622. + }
  6623. + else if (GET_CODE (body) == UNSPEC_VOLATILE
  6624. + && XINT (body, 1) == VUNSPEC_MVRC)
  6625. + {
  6626. + /* Coprocessor load */
  6627. + /* Ldc : Ku8 << 2 */
  6628. + fix->forwards = ((1 << 8) - 1) << 2;
  6629. + fix->backwards = 0;
  6630. + }
  6631. + else
  6632. + {
  6633. + /* Assume worst case which is lddpc insn. */
  6634. + fix->forwards = ((1 << 7) - 1) << 2;
  6635. + fix->backwards = 0;
  6636. + }
  6637. +
  6638. + fix->minipool = NULL;
  6639. +
  6640. + /* If an insn doesn't have a range defined for it, then it isn't expecting
  6641. + to be reworked by this code. Better to abort now than to generate duff
  6642. + assembly code. */
  6643. + if (fix->forwards == 0 && fix->backwards == 0)
  6644. + abort ();
  6645. +
  6646. + if (dump_file)
  6647. + {
  6648. + fprintf (dump_file,
  6649. + ";; %smode fixup for i%d; addr %lu, range (%ld,%ld): ",
  6650. + GET_MODE_NAME (mode),
  6651. + INSN_UID (insn), (unsigned long) address,
  6652. + -1 * (long) fix->backwards, (long) fix->forwards);
  6653. + avr32_print_value (dump_file, fix->value);
  6654. + fprintf (dump_file, "\n");
  6655. + }
  6656. +
  6657. + /* Add it to the chain of fixes. */
  6658. + fix->next = NULL;
  6659. +
  6660. + if (minipool_fix_head != NULL)
  6661. + minipool_fix_tail->next = fix;
  6662. + else
  6663. + minipool_fix_head = fix;
  6664. +
  6665. + minipool_fix_tail = fix;
  6666. +}
  6667. +
  6668. +
  6669. +/* Scan INSN and note any of its operands that need fixing.
  6670. + If DO_PUSHES is false we do not actually push any of the fixups
  6671. + needed. The function returns TRUE if any fixups were needed/pushed.
  6672. + This is used by avr32_memory_load_p() which needs to know about loads
  6673. + of constants that will be converted into minipool loads. */
  6674. +static bool
  6675. +note_invalid_constants (rtx insn, HOST_WIDE_INT address, int do_pushes)
  6676. +{
  6677. + bool result = false;
  6678. + int opno;
  6679. +
  6680. + extract_insn (insn);
  6681. +
  6682. + if (!constrain_operands (1))
  6683. + fatal_insn_not_found (insn);
  6684. +
  6685. + if (recog_data.n_alternatives == 0)
  6686. + return false;
  6687. +
  6688. + /* Fill in recog_op_alt with information about the constraints of this
  6689. + insn. */
  6690. + preprocess_constraints ();
  6691. +
  6692. + for (opno = 0; opno < recog_data.n_operands; opno++)
  6693. + {
  6694. + rtx op;
  6695. +
  6696. + /* Things we need to fix can only occur in inputs. */
  6697. + if (recog_data.operand_type[opno] != OP_IN)
  6698. + continue;
  6699. +
  6700. + op = recog_data.operand[opno];
  6701. +
  6702. + if (avr32_const_pool_ref_operand (op, GET_MODE (op)))
  6703. + {
  6704. + if (do_pushes)
  6705. + {
  6706. + rtx cop = avoid_constant_pool_reference (op);
  6707. +
  6708. + /* Casting the address of something to a mode narrower than a
  6709. + word can cause avoid_constant_pool_reference() to return the
  6710. + pool reference itself. That's no good to us here. Lets
  6711. + just hope that we can use the constant pool value directly.
  6712. + */
  6713. + if (op == cop)
  6714. + cop = get_pool_constant (XEXP (op, 0));
  6715. +
  6716. + push_minipool_fix (insn, address,
  6717. + recog_data.operand_loc[opno],
  6718. + recog_data.operand_mode[opno], cop);
  6719. + }
  6720. +
  6721. + result = true;
  6722. + }
  6723. + else if (TARGET_HAS_ASM_ADDR_PSEUDOS
  6724. + && avr32_address_operand (op, GET_MODE (op)))
  6725. + {
  6726. + /* Handle pseudo instructions using a direct address. These pseudo
  6727. + instructions might need entries in the constant pool and we must
  6728. + therefore create a constant pool for them, in case the
  6729. + assembler/linker needs to insert entries. */
  6730. + if (do_pushes)
  6731. + {
  6732. + /* Push a dummy constant pool entry so that the .cpool
  6733. + directive should be inserted on the appropriate place in the
  6734. + code even if there are no real constant pool entries. This
  6735. + is used by the assembler and linker to know where to put
  6736. + generated constant pool entries. */
  6737. + push_minipool_fix (insn, address,
  6738. + recog_data.operand_loc[opno],
  6739. + recog_data.operand_mode[opno],
  6740. + gen_rtx_UNSPEC (VOIDmode,
  6741. + gen_rtvec (1, const0_rtx),
  6742. + UNSPEC_FORCE_MINIPOOL));
  6743. + result = true;
  6744. + }
  6745. + }
  6746. + }
  6747. + return result;
  6748. +}
  6749. +
  6750. +
  6751. +static int
  6752. +avr32_insn_is_cast (rtx insn)
  6753. +{
  6754. +
  6755. + if (NONJUMP_INSN_P (insn)
  6756. + && GET_CODE (PATTERN (insn)) == SET
  6757. + && (GET_CODE (SET_SRC (PATTERN (insn))) == ZERO_EXTEND
  6758. + || GET_CODE (SET_SRC (PATTERN (insn))) == SIGN_EXTEND)
  6759. + && REG_P (XEXP (SET_SRC (PATTERN (insn)), 0))
  6760. + && REG_P (SET_DEST (PATTERN (insn))))
  6761. + return true;
  6762. + return false;
  6763. +}
  6764. +
  6765. +
  6766. +/* Replace all occurrences of reg FROM with reg TO in X. */
  6767. +rtx
  6768. +avr32_replace_reg (rtx x, rtx from, rtx to)
  6769. +{
  6770. + int i, j;
  6771. + const char *fmt;
  6772. +
  6773. + gcc_assert ( REG_P (from) && REG_P (to) );
  6774. +
  6775. + /* Allow this function to make replacements in EXPR_LISTs. */
  6776. + if (x == 0)
  6777. + return 0;
  6778. +
  6779. + if (rtx_equal_p (x, from))
  6780. + return to;
  6781. +
  6782. + if (GET_CODE (x) == SUBREG)
  6783. + {
  6784. + rtx new = avr32_replace_reg (SUBREG_REG (x), from, to);
  6785. +
  6786. + if (GET_CODE (new) == CONST_INT)
  6787. + {
  6788. + x = simplify_subreg (GET_MODE (x), new,
  6789. + GET_MODE (SUBREG_REG (x)),
  6790. + SUBREG_BYTE (x));
  6791. + gcc_assert (x);
  6792. + }
  6793. + else
  6794. + SUBREG_REG (x) = new;
  6795. +
  6796. + return x;
  6797. + }
  6798. + else if (GET_CODE (x) == ZERO_EXTEND)
  6799. + {
  6800. + rtx new = avr32_replace_reg (XEXP (x, 0), from, to);
  6801. +
  6802. + if (GET_CODE (new) == CONST_INT)
  6803. + {
  6804. + x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
  6805. + new, GET_MODE (XEXP (x, 0)));
  6806. + gcc_assert (x);
  6807. + }
  6808. + else
  6809. + XEXP (x, 0) = new;
  6810. +
  6811. + return x;
  6812. + }
  6813. +
  6814. + fmt = GET_RTX_FORMAT (GET_CODE (x));
  6815. + for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
  6816. + {
  6817. + if (fmt[i] == 'e')
  6818. + XEXP (x, i) = avr32_replace_reg (XEXP (x, i), from, to);
  6819. + else if (fmt[i] == 'E')
  6820. + for (j = XVECLEN (x, i) - 1; j >= 0; j--)
  6821. + XVECEXP (x, i, j) = avr32_replace_reg (XVECEXP (x, i, j), from, to);
  6822. + }
  6823. +
  6824. + return x;
  6825. +}
  6826. +
  6827. +
  6828. +/* FIXME: The level of nesting in this function is way too deep. It needs to be
  6829. + torn apart. */
  6830. +static void
  6831. +avr32_reorg_optimization (void)
  6832. +{
  6833. + rtx first = get_first_nonnote_insn ();
  6834. + rtx insn;
  6835. +
  6836. + if (TARGET_MD_REORG_OPTIMIZATION && (optimize_size || (optimize > 0)))
  6837. + {
  6838. +
  6839. + /* Scan through all insns looking for cast operations. */
  6840. + if (dump_file)
  6841. + {
  6842. + fprintf (dump_file, ";; Deleting redundant cast operations:\n");
  6843. + }
  6844. + for (insn = first; insn; insn = NEXT_INSN (insn))
  6845. + {
  6846. + rtx reg, src_reg, scan;
  6847. + enum machine_mode mode;
  6848. + int unused_cast;
  6849. + rtx label_ref;
  6850. +
  6851. + if (avr32_insn_is_cast (insn)
  6852. + && (GET_MODE (XEXP (SET_SRC (PATTERN (insn)), 0)) == QImode
  6853. + || GET_MODE (XEXP (SET_SRC (PATTERN (insn)), 0)) == HImode))
  6854. + {
  6855. + mode = GET_MODE (XEXP (SET_SRC (PATTERN (insn)), 0));
  6856. + reg = SET_DEST (PATTERN (insn));
  6857. + src_reg = XEXP (SET_SRC (PATTERN (insn)), 0);
  6858. + }
  6859. + else
  6860. + {
  6861. + continue;
  6862. + }
  6863. +
  6864. + unused_cast = false;
  6865. + label_ref = NULL_RTX;
  6866. + for (scan = NEXT_INSN (insn); scan; scan = NEXT_INSN (scan))
  6867. + {
  6868. + /* Check if we have reached the destination of a simple
  6869. + conditional jump which we have already scanned past. If so,
  6870. + we can safely continue scanning. */
  6871. + if (LABEL_P (scan) && label_ref != NULL_RTX)
  6872. + {
  6873. + if (CODE_LABEL_NUMBER (scan) ==
  6874. + CODE_LABEL_NUMBER (XEXP (label_ref, 0)))
  6875. + label_ref = NULL_RTX;
  6876. + else
  6877. + break;
  6878. + }
  6879. +
  6880. + if (!INSN_P (scan))
  6881. + continue;
  6882. +
  6883. + /* For conditional jumps we can manage to keep on scanning if
  6884. + we meet the destination label later on before any new jump
  6885. + insns occur. */
  6886. + if (GET_CODE (scan) == JUMP_INSN)
  6887. + {
  6888. + if (any_condjump_p (scan) && label_ref == NULL_RTX)
  6889. + label_ref = condjump_label (scan);
  6890. + else
  6891. + break;
  6892. + }
  6893. +
  6894. + /* Check if we have a call and the register is used as an argument. */
  6895. + if (CALL_P (scan)
  6896. + && find_reg_fusage (scan, USE, reg) )
  6897. + break;
  6898. +
  6899. + if (!reg_mentioned_p (reg, PATTERN (scan)))
  6900. + continue;
  6901. +
  6902. + /* Check if casted register is used in this insn */
  6903. + if ((regno_use_in (REGNO (reg), PATTERN (scan)) != NULL_RTX)
  6904. + && (GET_MODE (regno_use_in (REGNO (reg), PATTERN (scan))) ==
  6905. + GET_MODE (reg)))
  6906. + {
  6907. + /* If not used in the source to the set or in a memory
  6908. + expression in the destination then the register is used
  6909. + as a destination and is really dead. */
  6910. + if (single_set (scan)
  6911. + && GET_CODE (PATTERN (scan)) == SET
  6912. + && REG_P (SET_DEST (PATTERN (scan)))
  6913. + && !regno_use_in (REGNO (reg), SET_SRC (PATTERN (scan)))
  6914. + && label_ref == NULL_RTX)
  6915. + {
  6916. + unused_cast = true;
  6917. + }
  6918. + break;
  6919. + }
  6920. +
  6921. + /* Check if register is dead or set in this insn */
  6922. + if (dead_or_set_p (scan, reg))
  6923. + {
  6924. + unused_cast = true;
  6925. + break;
  6926. + }
  6927. + }
  6928. +
  6929. + /* Check if we have unresolved conditional jumps */
  6930. + if (label_ref != NULL_RTX)
  6931. + continue;
  6932. +
  6933. + if (unused_cast)
  6934. + {
  6935. + if (REGNO (reg) == REGNO (XEXP (SET_SRC (PATTERN (insn)), 0)))
  6936. + {
  6937. + /* One operand cast, safe to delete */
  6938. + if (dump_file)
  6939. + {
  6940. + fprintf (dump_file,
  6941. + ";; INSN %i removed, casted register %i value not used.\n",
  6942. + INSN_UID (insn), REGNO (reg));
  6943. + }
  6944. + SET_INSN_DELETED (insn);
  6945. + /* Force the instruction to be recognized again */
  6946. + INSN_CODE (insn) = -1;
  6947. + }
  6948. + else
  6949. + {
  6950. + /* Two operand cast, which really could be substituted with
  6951. + a move, if the source register is dead after the cast
  6952. + insn and then the insn which sets the source register
  6953. + could instead directly set the destination register for
  6954. + the cast. As long as there are no insns in between which
  6955. + uses the register. */
  6956. + rtx link = NULL_RTX;
  6957. + rtx set;
  6958. + rtx src_reg = XEXP (SET_SRC (PATTERN (insn)), 0);
  6959. + unused_cast = false;
  6960. +
  6961. + if (!find_reg_note (insn, REG_DEAD, src_reg))
  6962. + continue;
  6963. +
  6964. + /* Search for the insn which sets the source register */
  6965. + for (scan = PREV_INSN (insn);
  6966. + scan && GET_CODE (scan) != CODE_LABEL;
  6967. + scan = PREV_INSN (scan))
  6968. + {
  6969. + if (! INSN_P (scan))
  6970. + continue;
  6971. +
  6972. + set = single_set (scan);
  6973. + // Fix for bug #11763 : the following if condition
  6974. + // has been modified and else part is included to
  6975. + // set the link to NULL_RTX.
  6976. + // if (set && rtx_equal_p (src_reg, SET_DEST (set)))
  6977. + if (set && (REGNO(src_reg) == REGNO(SET_DEST(set))))
  6978. + {
  6979. + if (rtx_equal_p (src_reg, SET_DEST (set)))
  6980. + {
  6981. + link = scan;
  6982. + break;
  6983. + }
  6984. + else
  6985. + {
  6986. + link = NULL_RTX;
  6987. + break;
  6988. + }
  6989. + }
  6990. + }
  6991. +
  6992. +
  6993. + /* Found no link or link is a call insn where we can not
  6994. + change the destination register */
  6995. + if (link == NULL_RTX || CALL_P (link))
  6996. + continue;
  6997. +
  6998. + /* Scan through all insn between link and insn */
  6999. + for (scan = NEXT_INSN (link); scan; scan = NEXT_INSN (scan))
  7000. + {
  7001. + /* Don't try to trace forward past a CODE_LABEL if we
  7002. + haven't seen INSN yet. Ordinarily, we will only
  7003. + find the setting insn in LOG_LINKS if it is in the
  7004. + same basic block. However, cross-jumping can insert
  7005. + code labels in between the load and the call, and
  7006. + can result in situations where a single call insn
  7007. + may have two targets depending on where we came
  7008. + from. */
  7009. +
  7010. + if (GET_CODE (scan) == CODE_LABEL)
  7011. + break;
  7012. +
  7013. + if (!INSN_P (scan))
  7014. + continue;
  7015. +
  7016. + /* Don't try to trace forward past a JUMP. To optimize
  7017. + safely, we would have to check that all the
  7018. + instructions at the jump destination did not use REG.
  7019. + */
  7020. +
  7021. + if (GET_CODE (scan) == JUMP_INSN)
  7022. + {
  7023. + break;
  7024. + }
  7025. +
  7026. + if (!reg_mentioned_p (src_reg, PATTERN (scan)))
  7027. + continue;
  7028. +
  7029. + /* We have reached the cast insn */
  7030. + if (scan == insn)
  7031. + {
  7032. + /* We can remove cast and replace the destination
  7033. + register of the link insn with the destination
  7034. + of the cast */
  7035. + if (dump_file)
  7036. + {
  7037. + fprintf (dump_file,
  7038. + ";; INSN %i removed, casted value unused. "
  7039. + "Destination of removed cast operation: register %i, folded into INSN %i.\n",
  7040. + INSN_UID (insn), REGNO (reg),
  7041. + INSN_UID (link));
  7042. + }
  7043. + /* Update link insn */
  7044. + SET_DEST (PATTERN (link)) =
  7045. + gen_rtx_REG (mode, REGNO (reg));
  7046. + /* Force the instruction to be recognized again */
  7047. + INSN_CODE (link) = -1;
  7048. +
  7049. + /* Delete insn */
  7050. + SET_INSN_DELETED (insn);
  7051. + /* Force the instruction to be recognized again */
  7052. + INSN_CODE (insn) = -1;
  7053. + break;
  7054. + }
  7055. + }
  7056. + }
  7057. + }
  7058. + }
  7059. + }
  7060. +
  7061. + /* Disabled this optimization since it has a bug */
  7062. + /* In the case where the data instruction the shifted insn gets folded
  7063. + * into is a branch destination, this breaks, i.e.
  7064. + *
  7065. + * add r8, r10, r8 << 2
  7066. + * 1:
  7067. + * ld.w r11, r8[0]
  7068. + * ...
  7069. + * mov r8, sp
  7070. + * rjmp 1b
  7071. + *
  7072. + * gets folded to:
  7073. + *
  7074. + * 1:
  7075. + * ld.w r11, r10[r8 << 2]
  7076. + * ...
  7077. + * mov r8, sp
  7078. + * rjmp 1b
  7079. + *
  7080. + * which is clearly wrong..
  7081. + */
  7082. + if (0 && TARGET_MD_REORG_OPTIMIZATION && (optimize_size || (optimize > 0)))
  7083. + {
  7084. +
  7085. + /* Scan through all insns looking for shifted add operations */
  7086. + if (dump_file)
  7087. + {
  7088. + fprintf (dump_file,
  7089. + ";; Deleting redundant shifted add operations:\n");
  7090. + }
  7091. + for (insn = first; insn; insn = NEXT_INSN (insn))
  7092. + {
  7093. + rtx reg, mem_expr, scan, op0, op1;
  7094. + int add_only_used_as_pointer;
  7095. +
  7096. + if (INSN_P (insn)
  7097. + && GET_CODE (PATTERN (insn)) == SET
  7098. + && GET_CODE (SET_SRC (PATTERN (insn))) == PLUS
  7099. + && (GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 0)) == MULT
  7100. + || GET_CODE (XEXP (SET_SRC (PATTERN (insn)), 0)) == ASHIFT)
  7101. + && GET_CODE (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 1)) ==
  7102. + CONST_INT && REG_P (SET_DEST (PATTERN (insn)))
  7103. + && REG_P (XEXP (SET_SRC (PATTERN (insn)), 1))
  7104. + && REG_P (XEXP (XEXP (SET_SRC (PATTERN (insn)), 0), 0)))
  7105. + {
  7106. + reg = SET_DEST (PATTERN (insn));
  7107. + mem_expr = SET_SRC (PATTERN (insn));
  7108. + op0 = XEXP (XEXP (mem_expr, 0), 0);
  7109. + op1 = XEXP (mem_expr, 1);
  7110. + }
  7111. + else
  7112. + {
  7113. + continue;
  7114. + }
  7115. +
  7116. + /* Scan forward to check if the result of the shifted add
  7117. + operation is only used as an address in memory operations and
  7118. + that the operands to the shifted add are not clobbered. */
  7119. + add_only_used_as_pointer = false;
  7120. + for (scan = NEXT_INSN (insn); scan; scan = NEXT_INSN (scan))
  7121. + {
  7122. + if (!INSN_P (scan))
  7123. + continue;
  7124. +
  7125. + /* Don't try to trace forward past a JUMP or CALL. To optimize
  7126. + safely, we would have to check that all the instructions at
  7127. + the jump destination did not use REG. */
  7128. +
  7129. + if (GET_CODE (scan) == JUMP_INSN)
  7130. + {
  7131. + break;
  7132. + }
  7133. +
  7134. + /* If used in a call insn then we cannot optimize it away */
  7135. + if (CALL_P (scan) && find_regno_fusage (scan, USE, REGNO (reg)))
  7136. + break;
  7137. +
  7138. + /* If any of the operands of the shifted add are clobbered we
  7139. + cannot optimize the shifted add away */
  7140. + if ((reg_set_p (op0, scan) && (REGNO (op0) != REGNO (reg)))
  7141. + || (reg_set_p (op1, scan) && (REGNO (op1) != REGNO (reg))))
  7142. + break;
  7143. +
  7144. + if (!reg_mentioned_p (reg, PATTERN (scan)))
  7145. + continue;
  7146. +
  7147. + /* If used any other place than as a pointer or as the
  7148. + destination register we failed */
  7149. + if (!(single_set (scan)
  7150. + && GET_CODE (PATTERN (scan)) == SET
  7151. + && ((MEM_P (SET_DEST (PATTERN (scan)))
  7152. + && REG_P (XEXP (SET_DEST (PATTERN (scan)), 0))
  7153. + && REGNO (XEXP (SET_DEST (PATTERN (scan)), 0)) == REGNO (reg))
  7154. + || (MEM_P (SET_SRC (PATTERN (scan)))
  7155. + && REG_P (XEXP (SET_SRC (PATTERN (scan)), 0))
  7156. + && REGNO (XEXP
  7157. + (SET_SRC (PATTERN (scan)), 0)) == REGNO (reg))))
  7158. + && !(GET_CODE (PATTERN (scan)) == SET
  7159. + && REG_P (SET_DEST (PATTERN (scan)))
  7160. + && !regno_use_in (REGNO (reg),
  7161. + SET_SRC (PATTERN (scan)))))
  7162. + break;
  7163. +
  7164. + /* We cannot replace the pointer in TImode insns
  7165. + as these have a different addressing mode than the other
  7166. + memory insns. */
  7167. + if ( GET_MODE (SET_DEST (PATTERN (scan))) == TImode )
  7168. + break;
  7169. +
  7170. + /* Check if register is dead or set in this insn */
  7171. + if (dead_or_set_p (scan, reg))
  7172. + {
  7173. + add_only_used_as_pointer = true;
  7174. + break;
  7175. + }
  7176. + }
  7177. +
  7178. + if (add_only_used_as_pointer)
  7179. + {
  7180. + /* Lets delete the add insn and replace all memory references
  7181. + which uses the pointer with the full expression. */
  7182. + if (dump_file)
  7183. + {
  7184. + fprintf (dump_file,
  7185. + ";; Deleting INSN %i since address expression can be folded into all "
  7186. + "memory references using this expression\n",
  7187. + INSN_UID (insn));
  7188. + }
  7189. + SET_INSN_DELETED (insn);
  7190. + /* Force the instruction to be recognized again */
  7191. + INSN_CODE (insn) = -1;
  7192. +
  7193. + for (scan = NEXT_INSN (insn); scan; scan = NEXT_INSN (scan))
  7194. + {
  7195. + if (!INSN_P (scan))
  7196. + continue;
  7197. +
  7198. + if (!reg_mentioned_p (reg, PATTERN (scan)))
  7199. + continue;
  7200. +
  7201. + /* If used any other place than as a pointer or as the
  7202. + destination register we failed */
  7203. + if ((single_set (scan)
  7204. + && GET_CODE (PATTERN (scan)) == SET
  7205. + && ((MEM_P (SET_DEST (PATTERN (scan)))
  7206. + && REG_P (XEXP (SET_DEST (PATTERN (scan)), 0))
  7207. + && REGNO (XEXP (SET_DEST (PATTERN (scan)), 0)) ==
  7208. + REGNO (reg)) || (MEM_P (SET_SRC (PATTERN (scan)))
  7209. + &&
  7210. + REG_P (XEXP
  7211. + (SET_SRC (PATTERN (scan)),
  7212. + 0))
  7213. + &&
  7214. + REGNO (XEXP
  7215. + (SET_SRC (PATTERN (scan)),
  7216. + 0)) == REGNO (reg)))))
  7217. + {
  7218. + if (dump_file)
  7219. + {
  7220. + fprintf (dump_file,
  7221. + ";; Register %i replaced by indexed address in INSN %i\n",
  7222. + REGNO (reg), INSN_UID (scan));
  7223. + }
  7224. + if (MEM_P (SET_DEST (PATTERN (scan))))
  7225. + XEXP (SET_DEST (PATTERN (scan)), 0) = mem_expr;
  7226. + else
  7227. + XEXP (SET_SRC (PATTERN (scan)), 0) = mem_expr;
  7228. + }
  7229. +
  7230. + /* Check if register is dead or set in this insn */
  7231. + if (dead_or_set_p (scan, reg))
  7232. + {
  7233. + break;
  7234. + }
  7235. +
  7236. + }
  7237. + }
  7238. + }
  7239. + }
  7240. +
  7241. +
  7242. + if (TARGET_MD_REORG_OPTIMIZATION && (optimize_size || (optimize > 0)))
  7243. + {
  7244. +
  7245. + /* Scan through all insns looking for conditional register to
  7246. + register move operations */
  7247. + if (dump_file)
  7248. + {
  7249. + fprintf (dump_file,
  7250. + ";; Folding redundant conditional move operations:\n");
  7251. + }
  7252. + for (insn = first; insn; insn = next_nonnote_insn (insn))
  7253. + {
  7254. + rtx src_reg, dst_reg, scan, test;
  7255. +
  7256. + if (INSN_P (insn)
  7257. + && GET_CODE (PATTERN (insn)) == COND_EXEC
  7258. + && GET_CODE (COND_EXEC_CODE (PATTERN (insn))) == SET
  7259. + && REG_P (SET_SRC (COND_EXEC_CODE (PATTERN (insn))))
  7260. + && REG_P (SET_DEST (COND_EXEC_CODE (PATTERN (insn))))
  7261. + && find_reg_note (insn, REG_DEAD, SET_SRC (COND_EXEC_CODE (PATTERN (insn)))))
  7262. + {
  7263. + src_reg = SET_SRC (COND_EXEC_CODE (PATTERN (insn)));
  7264. + dst_reg = SET_DEST (COND_EXEC_CODE (PATTERN (insn)));
  7265. + test = COND_EXEC_TEST (PATTERN (insn));
  7266. + }
  7267. + else
  7268. + {
  7269. + continue;
  7270. + }
  7271. +
  7272. + /* Scan backward through the rest of insns in this if-then or if-else
  7273. + block and check if we can fold the move into another of the conditional
  7274. + insns in the same block. */
  7275. + scan = prev_nonnote_insn (insn);
  7276. + while (INSN_P (scan)
  7277. + && GET_CODE (PATTERN (scan)) == COND_EXEC
  7278. + && rtx_equal_p (COND_EXEC_TEST (PATTERN (scan)), test))
  7279. + {
  7280. + rtx pattern = COND_EXEC_CODE (PATTERN (scan));
  7281. + if ( GET_CODE (pattern) == PARALLEL )
  7282. + pattern = XVECEXP (pattern, 0, 0);
  7283. +
  7284. + if ( reg_set_p (src_reg, pattern) )
  7285. + {
  7286. + /* Fold in the destination register for the cond. move
  7287. + into this insn. */
  7288. + SET_DEST (pattern) = dst_reg;
  7289. + if (dump_file)
  7290. + {
  7291. + fprintf (dump_file,
  7292. + ";; Deleting INSN %i since this operation can be folded into INSN %i\n",
  7293. + INSN_UID (insn), INSN_UID (scan));
  7294. + }
  7295. +
  7296. + /* Scan and check if any of the insns in between uses the src_reg. We
  7297. + must then replace it with the dst_reg. */
  7298. + while ( (scan = next_nonnote_insn (scan)) != insn ){
  7299. + avr32_replace_reg (scan, src_reg, dst_reg);
  7300. + }
  7301. + /* Delete the insn. */
  7302. + SET_INSN_DELETED (insn);
  7303. +
  7304. + /* Force the instruction to be recognized again */
  7305. + INSN_CODE (insn) = -1;
  7306. + break;
  7307. + }
  7308. +
  7309. + /* If the destination register is used but not set in this insn
  7310. + we cannot fold. */
  7311. + if ( reg_mentioned_p (dst_reg, pattern) )
  7312. + break;
  7313. +
  7314. + scan = prev_nonnote_insn (scan);
  7315. + }
  7316. + }
  7317. + }
  7318. +
  7319. +}
  7320. +
  7321. +
  7322. +/* Exported to toplev.c.
  7323. +
  7324. + Do a final pass over the function, just before delayed branch
  7325. + scheduling. */
  7326. +static void
  7327. +avr32_reorg (void)
  7328. +{
  7329. + rtx insn;
  7330. + HOST_WIDE_INT address = 0;
  7331. + Mfix *fix;
  7332. +
  7333. + minipool_fix_head = minipool_fix_tail = NULL;
  7334. +
  7335. + /* The first insn must always be a note, or the code below won't scan it
  7336. + properly. */
  7337. + insn = get_insns ();
  7338. + if (GET_CODE (insn) != NOTE)
  7339. + abort ();
  7340. +
  7341. + /* Scan all the insns and record the operands that will need fixing. */
  7342. + for (insn = next_nonnote_insn (insn); insn; insn = next_nonnote_insn (insn))
  7343. + {
  7344. + if (GET_CODE (insn) == BARRIER)
  7345. + push_minipool_barrier (insn, address);
  7346. + else if (INSN_P (insn))
  7347. + {
  7348. + rtx table;
  7349. +
  7350. + note_invalid_constants (insn, address, true);
  7351. + address += get_attr_length (insn);
  7352. +
  7353. + /* If the insn is a vector jump, add the size of the table and skip
  7354. + the table. */
  7355. + if ((table = is_jump_table (insn)) != NULL)
  7356. + {
  7357. + address += get_jump_table_size (table);
  7358. + insn = table;
  7359. + }
  7360. + }
  7361. + }
  7362. +
  7363. + fix = minipool_fix_head;
  7364. +
  7365. + /* Now scan the fixups and perform the required changes. */
  7366. + while (fix)
  7367. + {
  7368. + Mfix *ftmp;
  7369. + Mfix *fdel;
  7370. + Mfix *last_added_fix;
  7371. + Mfix *last_barrier = NULL;
  7372. + Mfix *this_fix;
  7373. +
  7374. + /* Skip any further barriers before the next fix. */
  7375. + while (fix && GET_CODE (fix->insn) == BARRIER)
  7376. + fix = fix->next;
  7377. +
  7378. + /* No more fixes. */
  7379. + if (fix == NULL)
  7380. + break;
  7381. +
  7382. + last_added_fix = NULL;
  7383. +
  7384. + for (ftmp = fix; ftmp; ftmp = ftmp->next)
  7385. + {
  7386. + if (GET_CODE (ftmp->insn) == BARRIER)
  7387. + {
  7388. + if (ftmp->address >= minipool_vector_head->max_address)
  7389. + break;
  7390. +
  7391. + last_barrier = ftmp;
  7392. + }
  7393. + else if ((ftmp->minipool = add_minipool_forward_ref (ftmp)) == NULL)
  7394. + break;
  7395. +
  7396. + last_added_fix = ftmp; /* Keep track of the last fix added.
  7397. + */
  7398. + }
  7399. +
  7400. + /* If we found a barrier, drop back to that; any fixes that we could
  7401. + have reached but come after the barrier will now go in the next
  7402. + mini-pool. */
  7403. + if (last_barrier != NULL)
  7404. + {
  7405. + /* Reduce the refcount for those fixes that won't go into this pool
  7406. + after all. */
  7407. + for (fdel = last_barrier->next;
  7408. + fdel && fdel != ftmp; fdel = fdel->next)
  7409. + {
  7410. + fdel->minipool->refcount--;
  7411. + fdel->minipool = NULL;
  7412. + }
  7413. +
  7414. + ftmp = last_barrier;
  7415. + }
  7416. + else
  7417. + {
  7418. + /* ftmp is first fix that we can't fit into this pool and there no
  7419. + natural barriers that we could use. Insert a new barrier in the
  7420. + code somewhere between the previous fix and this one, and
  7421. + arrange to jump around it. */
  7422. + HOST_WIDE_INT max_address;
  7423. +
  7424. + /* The last item on the list of fixes must be a barrier, so we can
  7425. + never run off the end of the list of fixes without last_barrier
  7426. + being set. */
  7427. + if (ftmp == NULL)
  7428. + abort ();
  7429. +
  7430. + max_address = minipool_vector_head->max_address;
  7431. + /* Check that there isn't another fix that is in range that we
  7432. + couldn't fit into this pool because the pool was already too
  7433. + large: we need to put the pool before such an instruction. */
  7434. + if (ftmp->address < max_address)
  7435. + max_address = ftmp->address;
  7436. +
  7437. + last_barrier = create_fix_barrier (last_added_fix, max_address);
  7438. + }
  7439. +
  7440. + assign_minipool_offsets (last_barrier);
  7441. +
  7442. + while (ftmp)
  7443. + {
  7444. + if (GET_CODE (ftmp->insn) != BARRIER
  7445. + && ((ftmp->minipool = add_minipool_backward_ref (ftmp))
  7446. + == NULL))
  7447. + break;
  7448. +
  7449. + ftmp = ftmp->next;
  7450. + }
  7451. +
  7452. + /* Scan over the fixes we have identified for this pool, fixing them up
  7453. + and adding the constants to the pool itself. */
  7454. + for (this_fix = fix; this_fix && ftmp != this_fix;
  7455. + this_fix = this_fix->next)
  7456. + if (GET_CODE (this_fix->insn) != BARRIER
  7457. + /* Do nothing for entries present just to force the insertion of
  7458. + a minipool. */
  7459. + && !IS_FORCE_MINIPOOL (this_fix->value))
  7460. + {
  7461. + rtx addr = plus_constant (gen_rtx_LABEL_REF (VOIDmode,
  7462. + minipool_vector_label),
  7463. + this_fix->minipool->offset);
  7464. + *this_fix->loc = gen_rtx_MEM (this_fix->mode, addr);
  7465. + }
  7466. +
  7467. + dump_minipool (last_barrier->insn);
  7468. + fix = ftmp;
  7469. + }
  7470. +
  7471. + /* Free the minipool memory. */
  7472. + obstack_free (&minipool_obstack, minipool_startobj);
  7473. +
  7474. + avr32_reorg_optimization ();
  7475. +}
  7476. +
  7477. +
  7478. +/* Hook for doing some final scanning of instructions. Does nothing yet...*/
  7479. +void
  7480. +avr32_final_prescan_insn (rtx insn ATTRIBUTE_UNUSED,
  7481. + rtx * opvec ATTRIBUTE_UNUSED,
  7482. + int noperands ATTRIBUTE_UNUSED)
  7483. +{
  7484. + return;
  7485. +}
  7486. +
  7487. +
  7488. +/* Function for changing the condition on the next instruction,
  7489. + should be used when emitting compare instructions and
  7490. + the condition of the next instruction needs to change.
  7491. +*/
  7492. +int
  7493. +set_next_insn_cond (rtx cur_insn, rtx new_cond)
  7494. +{
  7495. + rtx next_insn = next_nonnote_insn (cur_insn);
  7496. + if ((next_insn != NULL_RTX)
  7497. + && (INSN_P (next_insn)))
  7498. + {
  7499. + if ((GET_CODE (PATTERN (next_insn)) == SET)
  7500. + && (GET_CODE (SET_SRC (PATTERN (next_insn))) == IF_THEN_ELSE))
  7501. + {
  7502. + /* Branch instructions */
  7503. + XEXP (SET_SRC (PATTERN (next_insn)), 0) = new_cond;
  7504. + /* Force the instruction to be recognized again */
  7505. + INSN_CODE (next_insn) = -1;
  7506. + return TRUE;
  7507. + }
  7508. + else if ((GET_CODE (PATTERN (next_insn)) == SET)
  7509. + && avr32_comparison_operator (SET_SRC (PATTERN (next_insn)),
  7510. + GET_MODE (SET_SRC (PATTERN (next_insn)))))
  7511. + {
  7512. + /* scc with no compare */
  7513. + SET_SRC (PATTERN (next_insn)) = new_cond;
  7514. + /* Force the instruction to be recognized again */
  7515. + INSN_CODE (next_insn) = -1;
  7516. + return TRUE;
  7517. + }
  7518. + else if (GET_CODE (PATTERN (next_insn)) == COND_EXEC)
  7519. + {
  7520. + if ( GET_CODE (new_cond) == UNSPEC )
  7521. + {
  7522. + COND_EXEC_TEST (PATTERN (next_insn)) =
  7523. + gen_rtx_UNSPEC (CCmode,
  7524. + gen_rtvec (2,
  7525. + XEXP (COND_EXEC_TEST (PATTERN (next_insn)), 0),
  7526. + XEXP (COND_EXEC_TEST (PATTERN (next_insn)), 1)),
  7527. + XINT (new_cond, 1));
  7528. + }
  7529. + else
  7530. + {
  7531. + PUT_CODE(COND_EXEC_TEST (PATTERN (next_insn)), GET_CODE(new_cond));
  7532. + }
  7533. + }
  7534. + }
  7535. +
  7536. + return FALSE;
  7537. +}
  7538. +
  7539. +
  7540. +/* Function for obtaining the condition for the next instruction after cur_insn.
  7541. +*/
  7542. +rtx
  7543. +get_next_insn_cond (rtx cur_insn)
  7544. +{
  7545. + rtx next_insn = next_nonnote_insn (cur_insn);
  7546. + rtx cond = NULL_RTX;
  7547. + if (next_insn != NULL_RTX
  7548. + && INSN_P (next_insn))
  7549. + {
  7550. + if ((GET_CODE (PATTERN (next_insn)) == SET)
  7551. + && (GET_CODE (SET_SRC (PATTERN (next_insn))) == IF_THEN_ELSE))
  7552. + {
  7553. + /* Branch and cond if then else instructions */
  7554. + cond = XEXP (SET_SRC (PATTERN (next_insn)), 0);
  7555. + }
  7556. + else if ((GET_CODE (PATTERN (next_insn)) == SET)
  7557. + && avr32_comparison_operator (SET_SRC (PATTERN (next_insn)),
  7558. + GET_MODE (SET_SRC (PATTERN (next_insn)))))
  7559. + {
  7560. + /* scc with no compare */
  7561. + cond = SET_SRC (PATTERN (next_insn));
  7562. + }
  7563. + else if (GET_CODE (PATTERN (next_insn)) == COND_EXEC)
  7564. + {
  7565. + cond = COND_EXEC_TEST (PATTERN (next_insn));
  7566. + }
  7567. + }
  7568. + return cond;
  7569. +}
  7570. +
  7571. +
  7572. +/* Check if the next insn is a conditional insn that will emit a compare
  7573. + for itself.
  7574. +*/
  7575. +rtx
  7576. +next_insn_emits_cmp (rtx cur_insn)
  7577. +{
  7578. + rtx next_insn = next_nonnote_insn (cur_insn);
  7579. + rtx cond = NULL_RTX;
  7580. + if (next_insn != NULL_RTX
  7581. + && INSN_P (next_insn))
  7582. + {
  7583. + if ( ((GET_CODE (PATTERN (next_insn)) == SET)
  7584. + && (GET_CODE (SET_SRC (PATTERN (next_insn))) == IF_THEN_ELSE)
  7585. + && (XEXP (XEXP (SET_SRC (PATTERN (next_insn)), 0),0) != cc0_rtx))
  7586. + || GET_CODE (PATTERN (next_insn)) == COND_EXEC )
  7587. + return TRUE;
  7588. + }
  7589. + return FALSE;
  7590. +}
  7591. +
  7592. +
  7593. +rtx
  7594. +avr32_output_cmp (rtx cond, enum machine_mode mode, rtx op0, rtx op1)
  7595. +{
  7596. +
  7597. + rtx new_cond = NULL_RTX;
  7598. + rtx ops[2];
  7599. + rtx compare_pattern;
  7600. + ops[0] = op0;
  7601. + ops[1] = op1;
  7602. +
  7603. + if ( GET_CODE (op0) == AND )
  7604. + compare_pattern = op0;
  7605. + else
  7606. + compare_pattern = gen_rtx_COMPARE (mode, op0, op1);
  7607. +
  7608. + new_cond = is_compare_redundant (compare_pattern, cond);
  7609. +
  7610. + if (new_cond != NULL_RTX)
  7611. + return new_cond;
  7612. +
  7613. + /* Check if we are inserting a bit-load instead of a compare. */
  7614. + if ( GET_CODE (op0) == AND )
  7615. + {
  7616. + ops[0] = XEXP (op0, 0);
  7617. + ops[1] = XEXP (op0, 1);
  7618. + output_asm_insn ("bld\t%0, %p1", ops);
  7619. + return cond;
  7620. + }
  7621. +
  7622. + /* Insert compare */
  7623. + switch (mode)
  7624. + {
  7625. + case QImode:
  7626. + output_asm_insn ("cp.b\t%0, %1", ops);
  7627. + break;
  7628. + case HImode:
  7629. + output_asm_insn ("cp.h\t%0, %1", ops);
  7630. + break;
  7631. + case SImode:
  7632. + output_asm_insn ("cp.w\t%0, %1", ops);
  7633. + break;
  7634. + case DImode:
  7635. + if (GET_CODE (op1) != REG)
  7636. + output_asm_insn ("cp.w\t%0, %1\ncpc\t%m0", ops);
  7637. + else
  7638. + output_asm_insn ("cp.w\t%0, %1\ncpc\t%m0, %m1", ops);
  7639. + break;
  7640. + default:
  7641. + internal_error ("Unknown comparison mode");
  7642. + break;
  7643. + }
  7644. +
  7645. + return cond;
  7646. +}
  7647. +
  7648. +
  7649. +int
  7650. +avr32_load_multiple_operation (rtx op,
  7651. + enum machine_mode mode ATTRIBUTE_UNUSED)
  7652. +{
  7653. + int count = XVECLEN (op, 0);
  7654. + unsigned int dest_regno;
  7655. + rtx src_addr;
  7656. + rtx elt;
  7657. + int i = 1, base = 0;
  7658. +
  7659. + if (count <= 1 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
  7660. + return 0;
  7661. +
  7662. + /* Check to see if this might be a write-back. */
  7663. + if (GET_CODE (SET_SRC (elt = XVECEXP (op, 0, 0))) == PLUS)
  7664. + {
  7665. + i++;
  7666. + base = 1;
  7667. +
  7668. + /* Now check it more carefully. */
  7669. + if (GET_CODE (SET_DEST (elt)) != REG
  7670. + || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
  7671. + || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
  7672. + || INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
  7673. + return 0;
  7674. + }
  7675. +
  7676. + /* Perform a quick check so we don't blow up below. */
  7677. + if (count <= 1
  7678. + || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
  7679. + || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
  7680. + || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != UNSPEC)
  7681. + return 0;
  7682. +
  7683. + dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
  7684. + src_addr = XEXP (SET_SRC (XVECEXP (op, 0, i - 1)), 0);
  7685. +
  7686. + for (; i < count; i++)
  7687. + {
  7688. + elt = XVECEXP (op, 0, i);
  7689. +
  7690. + if (GET_CODE (elt) != SET
  7691. + || GET_CODE (SET_DEST (elt)) != REG
  7692. + || GET_MODE (SET_DEST (elt)) != SImode
  7693. + || GET_CODE (SET_SRC (elt)) != UNSPEC)
  7694. + return 0;
  7695. + }
  7696. +
  7697. + return 1;
  7698. +}
  7699. +
  7700. +
  7701. +int
  7702. +avr32_store_multiple_operation (rtx op,
  7703. + enum machine_mode mode ATTRIBUTE_UNUSED)
  7704. +{
  7705. + int count = XVECLEN (op, 0);
  7706. + int src_regno;
  7707. + rtx dest_addr;
  7708. + rtx elt;
  7709. + int i = 1;
  7710. +
  7711. + if (count <= 1 || GET_CODE (XVECEXP (op, 0, 0)) != SET)
  7712. + return 0;
  7713. +
  7714. + /* Perform a quick check so we don't blow up below. */
  7715. + if (count <= i
  7716. + || GET_CODE (XVECEXP (op, 0, i - 1)) != SET
  7717. + || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
  7718. + || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != UNSPEC)
  7719. + return 0;
  7720. +
  7721. + src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
  7722. + dest_addr = XEXP (SET_DEST (XVECEXP (op, 0, i - 1)), 0);
  7723. +
  7724. + for (; i < count; i++)
  7725. + {
  7726. + elt = XVECEXP (op, 0, i);
  7727. +
  7728. + if (GET_CODE (elt) != SET
  7729. + || GET_CODE (SET_DEST (elt)) != MEM
  7730. + || GET_MODE (SET_DEST (elt)) != SImode
  7731. + || GET_CODE (SET_SRC (elt)) != UNSPEC)
  7732. + return 0;
  7733. + }
  7734. +
  7735. + return 1;
  7736. +}
  7737. +
  7738. +
  7739. +int
  7740. +avr32_valid_macmac_bypass (rtx insn_out, rtx insn_in)
  7741. +{
  7742. + /* Check if they use the same accumulator */
  7743. + if (rtx_equal_p
  7744. + (SET_DEST (PATTERN (insn_out)), SET_DEST (PATTERN (insn_in))))
  7745. + {
  7746. + return TRUE;
  7747. + }
  7748. +
  7749. + return FALSE;
  7750. +}
  7751. +
  7752. +
  7753. +int
  7754. +avr32_valid_mulmac_bypass (rtx insn_out, rtx insn_in)
  7755. +{
  7756. + /*
  7757. + Check if the mul instruction produces the accumulator for the mac
  7758. + instruction. */
  7759. + if (rtx_equal_p
  7760. + (SET_DEST (PATTERN (insn_out)), SET_DEST (PATTERN (insn_in))))
  7761. + {
  7762. + return TRUE;
  7763. + }
  7764. + return FALSE;
  7765. +}
  7766. +
  7767. +
  7768. +int
  7769. +avr32_store_bypass (rtx insn_out, rtx insn_in)
  7770. +{
  7771. + /* Only valid bypass if the output result is used as an src in the store
  7772. + instruction, NOT if used as a pointer or base. */
  7773. + if (rtx_equal_p
  7774. + (SET_DEST (PATTERN (insn_out)), SET_SRC (PATTERN (insn_in))))
  7775. + {
  7776. + return TRUE;
  7777. + }
  7778. +
  7779. + return FALSE;
  7780. +}
  7781. +
  7782. +
  7783. +int
  7784. +avr32_mul_waw_bypass (rtx insn_out, rtx insn_in)
  7785. +{
  7786. + /* Check if the register holding the result from the mul instruction is
  7787. + used as a result register in the input instruction. */
  7788. + if (rtx_equal_p
  7789. + (SET_DEST (PATTERN (insn_out)), SET_DEST (PATTERN (insn_in))))
  7790. + {
  7791. + return TRUE;
  7792. + }
  7793. +
  7794. + return FALSE;
  7795. +}
  7796. +
  7797. +
  7798. +int
  7799. +avr32_valid_load_double_bypass (rtx insn_out, rtx insn_in)
  7800. +{
  7801. + /* Check if the first loaded word in insn_out is used in insn_in. */
  7802. + rtx dst_reg;
  7803. + rtx second_loaded_reg;
  7804. +
  7805. + /* If this is a double alu operation then the bypass is not valid */
  7806. + if ((get_attr_type (insn_in) == TYPE_ALU
  7807. + || get_attr_type (insn_in) == TYPE_ALU2)
  7808. + && (GET_MODE_SIZE (GET_MODE (SET_DEST (PATTERN (insn_out)))) > 4))
  7809. + return FALSE;
  7810. +
  7811. + /* Get the destination register in the load */
  7812. + if (!REG_P (SET_DEST (PATTERN (insn_out))))
  7813. + return FALSE;
  7814. +
  7815. + dst_reg = SET_DEST (PATTERN (insn_out));
  7816. + second_loaded_reg = gen_rtx_REG (SImode, REGNO (dst_reg) + 1);
  7817. +
  7818. + if (!reg_mentioned_p (second_loaded_reg, PATTERN (insn_in)))
  7819. + return TRUE;
  7820. +
  7821. + return FALSE;
  7822. +}
  7823. +
  7824. +
  7825. +int
  7826. +avr32_valid_load_quad_bypass (rtx insn_out, rtx insn_in)
  7827. +{
  7828. + /*
  7829. + Check if the two first loaded word in insn_out are used in insn_in. */
  7830. + rtx dst_reg;
  7831. + rtx third_loaded_reg, fourth_loaded_reg;
  7832. +
  7833. + /* Get the destination register in the load */
  7834. + if (!REG_P (SET_DEST (PATTERN (insn_out))))
  7835. + return FALSE;
  7836. +
  7837. + dst_reg = SET_DEST (PATTERN (insn_out));
  7838. + third_loaded_reg = gen_rtx_REG (SImode, REGNO (dst_reg) + 2);
  7839. + fourth_loaded_reg = gen_rtx_REG (SImode, REGNO (dst_reg) + 3);
  7840. +
  7841. + if (!reg_mentioned_p (third_loaded_reg, PATTERN (insn_in))
  7842. + && !reg_mentioned_p (fourth_loaded_reg, PATTERN (insn_in)))
  7843. + {
  7844. + return TRUE;
  7845. + }
  7846. +
  7847. + return FALSE;
  7848. +}
  7849. +
  7850. +
  7851. +rtx
  7852. +avr32_ifcvt_modify_test (ce_if_block_t *ce_info, rtx test )
  7853. +{
  7854. + rtx branch_insn;
  7855. + rtx cmp_test;
  7856. + rtx compare_op0;
  7857. + rtx compare_op1;
  7858. +
  7859. +
  7860. + if ( !ce_info
  7861. + || test == NULL_RTX
  7862. + || !reg_mentioned_p (cc0_rtx, test))
  7863. + return test;
  7864. +
  7865. + branch_insn = BB_END (ce_info->test_bb);
  7866. + cmp_test = PATTERN(prev_nonnote_insn (branch_insn));
  7867. +
  7868. + if (GET_CODE(cmp_test) != SET
  7869. + || !CC0_P(XEXP(cmp_test, 0)) )
  7870. + return cmp_test;
  7871. +
  7872. + if ( GET_CODE(SET_SRC(cmp_test)) == COMPARE ){
  7873. + compare_op0 = XEXP(SET_SRC(cmp_test), 0);
  7874. + compare_op1 = XEXP(SET_SRC(cmp_test), 1);
  7875. + } else {
  7876. + compare_op0 = SET_SRC(cmp_test);
  7877. + compare_op1 = const0_rtx;
  7878. + }
  7879. +
  7880. + return gen_rtx_fmt_ee (GET_CODE(test), GET_MODE (compare_op0),
  7881. + compare_op0, compare_op1);
  7882. +}
  7883. +
  7884. +
  7885. +rtx
  7886. +avr32_ifcvt_modify_insn (ce_if_block_t *ce_info, rtx pattern, rtx insn,
  7887. + int *num_true_changes)
  7888. +{
  7889. + rtx test = COND_EXEC_TEST(pattern);
  7890. + rtx op = COND_EXEC_CODE(pattern);
  7891. + rtx cmp_insn;
  7892. + rtx cond_exec_insn;
  7893. + int inputs_set_outside_ifblock = 1;
  7894. + basic_block current_bb = BLOCK_FOR_INSN (insn);
  7895. + rtx bb_insn ;
  7896. + enum machine_mode mode = GET_MODE (XEXP (op, 0));
  7897. +
  7898. + if (CC0_P(XEXP(test, 0)))
  7899. + test = avr32_ifcvt_modify_test (ce_info,
  7900. + test );
  7901. +
  7902. + /* We do not support multiple tests. */
  7903. + if ( ce_info
  7904. + && ce_info->num_multiple_test_blocks > 0 )
  7905. + return NULL_RTX;
  7906. +
  7907. + pattern = gen_rtx_COND_EXEC (VOIDmode, test, op);
  7908. +
  7909. + if ( !reload_completed )
  7910. + {
  7911. + rtx start;
  7912. + int num_insns;
  7913. + int max_insns = MAX_CONDITIONAL_EXECUTE;
  7914. +
  7915. + if ( !ce_info )
  7916. + return op;
  7917. +
  7918. + /* Check if the insn is not suitable for conditional
  7919. + execution. */
  7920. + start_sequence ();
  7921. + cond_exec_insn = emit_insn (pattern);
  7922. + if ( recog_memoized (cond_exec_insn) < 0
  7923. + && can_create_pseudo_p () )
  7924. + {
  7925. + /* Insn is not suitable for conditional execution, try
  7926. + to fix it up by using an extra scratch register or
  7927. + by pulling the operation outside the if-then-else
  7928. + and then emitting a conditional move inside the if-then-else. */
  7929. + end_sequence ();
  7930. + if ( GET_CODE (op) != SET
  7931. + || !REG_P (SET_DEST (op))
  7932. + || GET_CODE (SET_SRC (op)) == IF_THEN_ELSE
  7933. + || GET_MODE_SIZE (mode) > UNITS_PER_WORD )
  7934. + return NULL_RTX;
  7935. +
  7936. + /* Check if any of the input operands to the insn is set inside the
  7937. + current block. */
  7938. + if ( current_bb->index == ce_info->then_bb->index )
  7939. + start = PREV_INSN (BB_HEAD (ce_info->then_bb));
  7940. + else
  7941. + start = PREV_INSN (BB_HEAD (ce_info->else_bb));
  7942. +
  7943. +
  7944. + for ( bb_insn = next_nonnote_insn (start); bb_insn != insn; bb_insn = next_nonnote_insn (bb_insn) )
  7945. + {
  7946. + rtx set = single_set (bb_insn);
  7947. +
  7948. + if ( set && reg_mentioned_p (SET_DEST (set), SET_SRC (op)))
  7949. + {
  7950. + inputs_set_outside_ifblock = 0;
  7951. + break;
  7952. + }
  7953. + }
  7954. +
  7955. + cmp_insn = prev_nonnote_insn (BB_END (ce_info->test_bb));
  7956. +
  7957. +
  7958. + /* Check if we can insert more insns. */
  7959. + num_insns = ( ce_info->num_then_insns +
  7960. + ce_info->num_else_insns +
  7961. + ce_info->num_cond_clobber_insns +
  7962. + ce_info->num_extra_move_insns );
  7963. +
  7964. + if ( ce_info->num_else_insns != 0 )
  7965. + max_insns *=2;
  7966. +
  7967. + if ( num_insns >= max_insns )
  7968. + return NULL_RTX;
  7969. +
  7970. + /* Check if we have an instruction which might be converted to
  7971. + conditional form if we give it a scratch register to clobber. */
  7972. + {
  7973. + rtx clobber_insn;
  7974. + rtx scratch_reg = gen_reg_rtx (mode);
  7975. + rtx new_pattern = copy_rtx (pattern);
  7976. + rtx set_src = SET_SRC (COND_EXEC_CODE (new_pattern));
  7977. +
  7978. + rtx clobber = gen_rtx_CLOBBER (mode, scratch_reg);
  7979. + rtx vec[2] = { COND_EXEC_CODE (new_pattern), clobber };
  7980. + COND_EXEC_CODE (new_pattern) = gen_rtx_PARALLEL (mode, gen_rtvec_v (2, vec));
  7981. +
  7982. + start_sequence ();
  7983. + clobber_insn = emit_insn (new_pattern);
  7984. +
  7985. + if ( recog_memoized (clobber_insn) >= 0
  7986. + && ( ( GET_RTX_LENGTH (GET_CODE (set_src)) == 2
  7987. + && CONST_INT_P (XEXP (set_src, 1))
  7988. + && avr32_const_ok_for_constraint_p (INTVAL (XEXP (set_src, 1)), 'K', "Ks08") )
  7989. + || !ce_info->else_bb
  7990. + || current_bb->index == ce_info->else_bb->index ))
  7991. + {
  7992. + end_sequence ();
  7993. + /* Force the insn to be recognized again. */
  7994. + INSN_CODE (insn) = -1;
  7995. +
  7996. + /* If this is the first change in this IF-block then
  7997. + signal that we have made a change. */
  7998. + if ( ce_info->num_cond_clobber_insns == 0
  7999. + && ce_info->num_extra_move_insns == 0 )
  8000. + *num_true_changes += 1;
  8001. +
  8002. + ce_info->num_cond_clobber_insns++;
  8003. +
  8004. + if (dump_file)
  8005. + fprintf (dump_file,
  8006. + "\nReplacing INSN %d with an insn using a scratch register for later ifcvt passes...\n",
  8007. + INSN_UID (insn));
  8008. +
  8009. + return COND_EXEC_CODE (new_pattern);
  8010. + }
  8011. + end_sequence ();
  8012. + }
  8013. +
  8014. + if ( inputs_set_outside_ifblock )
  8015. + {
  8016. + /* Check if the insn before the cmp is an and which used
  8017. + together with the cmp can be optimized into a bld. If
  8018. + so then we should try to put the insn before the and
  8019. + so that we can catch the bld peephole. */
  8020. + rtx set;
  8021. + rtx insn_before_cmp_insn = prev_nonnote_insn (cmp_insn);
  8022. + if (insn_before_cmp_insn
  8023. + && (set = single_set (insn_before_cmp_insn))
  8024. + && GET_CODE (SET_SRC (set)) == AND
  8025. + && one_bit_set_operand (XEXP (SET_SRC (set), 1), SImode)
  8026. + /* Also make sure that the insn does not set any
  8027. + of the input operands to the insn we are pulling out. */
  8028. + && !reg_mentioned_p (SET_DEST (set), SET_SRC (op)) )
  8029. + cmp_insn = prev_nonnote_insn (cmp_insn);
  8030. +
  8031. + /* We can try to put the operation outside the if-then-else
  8032. + blocks and insert a move. */
  8033. + if ( !insn_invalid_p (insn)
  8034. + /* Do not allow conditional insns to be moved outside the
  8035. + if-then-else. */
  8036. + && !reg_mentioned_p (cc0_rtx, insn)
  8037. + /* We cannot move memory loads outside of the if-then-else
  8038. + since the memory access should not be performed if the
  8039. + condition is not met. */
  8040. + && !mem_mentioned_p (SET_SRC (op)) )
  8041. + {
  8042. + rtx scratch_reg = gen_reg_rtx (mode);
  8043. + rtx op_pattern = copy_rtx (op);
  8044. + rtx new_insn, seq;
  8045. + rtx link, prev_link;
  8046. + op = copy_rtx (op);
  8047. + /* Emit the operation to a temp reg before the compare,
  8048. + and emit a move inside the if-then-else, hoping that the
  8049. + whole if-then-else can be converted to conditional
  8050. + execution. */
  8051. + SET_DEST (op_pattern) = scratch_reg;
  8052. + start_sequence ();
  8053. + new_insn = emit_insn (op_pattern);
  8054. + seq = get_insns();
  8055. + end_sequence ();
  8056. +
  8057. + /* Check again that the insn is valid. For some insns the insn might
  8058. + become invalid if the destination register is changed. Ie. for mulacc
  8059. + operations. */
  8060. + if ( insn_invalid_p (new_insn) )
  8061. + return NULL_RTX;
  8062. +
  8063. + emit_insn_before_setloc (seq, cmp_insn, INSN_LOCATOR (insn));
  8064. +
  8065. + if (dump_file)
  8066. + fprintf (dump_file,
  8067. + "\nMoving INSN %d out of IF-block by adding INSN %d...\n",
  8068. + INSN_UID (insn), INSN_UID (new_insn));
  8069. +
  8070. + ce_info->extra_move_insns[ce_info->num_extra_move_insns] = insn;
  8071. + ce_info->moved_insns[ce_info->num_extra_move_insns] = new_insn;
  8072. + XEXP (op, 1) = scratch_reg;
  8073. + /* Force the insn to be recognized again. */
  8074. + INSN_CODE (insn) = -1;
  8075. +
  8076. + /* Move REG_DEAD notes to the moved insn. */
  8077. + prev_link = NULL_RTX;
  8078. + for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
  8079. + {
  8080. + if (REG_NOTE_KIND (link) == REG_DEAD)
  8081. + {
  8082. + /* Add the REG_DEAD note to the new insn. */
  8083. + rtx dead_reg = XEXP (link, 0);
  8084. + REG_NOTES (new_insn) = gen_rtx_EXPR_LIST (REG_DEAD, dead_reg, REG_NOTES (new_insn));
  8085. + /* Remove the REG_DEAD note from the insn we convert to a move. */
  8086. + if ( prev_link )
  8087. + XEXP (prev_link, 1) = XEXP (link, 1);
  8088. + else
  8089. + REG_NOTES (insn) = XEXP (link, 1);
  8090. + }
  8091. + else
  8092. + {
  8093. + prev_link = link;
  8094. + }
  8095. + }
  8096. + /* Add a REG_DEAD note to signal that the scratch register is dead. */
  8097. + REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_DEAD, scratch_reg, REG_NOTES (insn));
  8098. +
  8099. + /* If this is the first change in this IF-block then
  8100. + signal that we have made a change. */
  8101. + if ( ce_info->num_cond_clobber_insns == 0
  8102. + && ce_info->num_extra_move_insns == 0 )
  8103. + *num_true_changes += 1;
  8104. +
  8105. + ce_info->num_extra_move_insns++;
  8106. + return op;
  8107. + }
  8108. + }
  8109. +
  8110. + /* We failed to fixup the insns, so this if-then-else can not be made
  8111. + conditional. Just return NULL_RTX so that the if-then-else conversion
  8112. + for this if-then-else will be cancelled. */
  8113. + return NULL_RTX;
  8114. + }
  8115. + end_sequence ();
  8116. + return op;
  8117. + }
  8118. +
  8119. + /* Signal that we have started if conversion after reload, which means
  8120. + that it should be safe to split all the predicable clobber insns which
  8121. + did not become cond_exec back into a simpler form if possible. */
  8122. + cfun->machine->ifcvt_after_reload = 1;
  8123. +
  8124. + return pattern;
  8125. +}
  8126. +
  8127. +
  8128. +void
  8129. +avr32_ifcvt_modify_cancel ( ce_if_block_t *ce_info, int *num_true_changes)
  8130. +{
  8131. + int n;
  8132. +
  8133. + if ( ce_info->num_extra_move_insns > 0
  8134. + && ce_info->num_cond_clobber_insns == 0)
  8135. + /* Signal that we did not do any changes after all. */
  8136. + *num_true_changes -= 1;
  8137. +
  8138. + /* Remove any inserted move insns. */
  8139. + for ( n = 0; n < ce_info->num_extra_move_insns; n++ )
  8140. + {
  8141. + rtx link, prev_link;
  8142. +
  8143. + /* Remove REG_DEAD note since we are not needing the scratch register anyway. */
  8144. + prev_link = NULL_RTX;
  8145. + for (link = REG_NOTES (ce_info->extra_move_insns[n]); link; link = XEXP (link, 1))
  8146. + {
  8147. + if (REG_NOTE_KIND (link) == REG_DEAD)
  8148. + {
  8149. + if ( prev_link )
  8150. + XEXP (prev_link, 1) = XEXP (link, 1);
  8151. + else
  8152. + REG_NOTES (ce_info->extra_move_insns[n]) = XEXP (link, 1);
  8153. + }
  8154. + else
  8155. + {
  8156. + prev_link = link;
  8157. + }
  8158. + }
  8159. +
  8160. + /* Revert all reg_notes for the moved insn. */
  8161. + for (link = REG_NOTES (ce_info->moved_insns[n]); link; link = XEXP (link, 1))
  8162. + {
  8163. + REG_NOTES (ce_info->extra_move_insns[n]) = gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
  8164. + XEXP (link, 0),
  8165. + REG_NOTES (ce_info->extra_move_insns[n]));
  8166. + }
  8167. +
  8168. + /* Remove the moved insn. */
  8169. + remove_insn ( ce_info->moved_insns[n] );
  8170. + }
  8171. +}
  8172. +
  8173. +
  8174. +/* Function returning TRUE if INSN with OPERANDS is a splittable
  8175. + conditional immediate clobber insn. We assume that the insn is
  8176. + already a conditional immediate clobber insns and do not check
  8177. + for that. */
  8178. +int
  8179. +avr32_cond_imm_clobber_splittable (rtx insn, rtx operands[])
  8180. +{
  8181. + if ( REGNO (operands[0]) == REGNO (operands[1]) )
  8182. + {
  8183. + if ( (GET_CODE (SET_SRC (XVECEXP (PATTERN (insn),0,0))) == PLUS
  8184. + && !avr32_const_ok_for_constraint_p (INTVAL (operands[2]), 'I', "Is21"))
  8185. + || (GET_CODE (SET_SRC (XVECEXP (PATTERN (insn),0,0))) == MINUS
  8186. + && !avr32_const_ok_for_constraint_p (INTVAL (operands[2]), 'K', "Ks21")))
  8187. + return FALSE;
  8188. + }
  8189. + else if ( (logical_binary_operator (SET_SRC (XVECEXP (PATTERN (insn),0,0)), VOIDmode)
  8190. + || (GET_CODE (SET_SRC (XVECEXP (PATTERN (insn),0,0))) == PLUS
  8191. + && !avr32_const_ok_for_constraint_p (INTVAL (operands[2]), 'I', "Is16"))
  8192. + || (GET_CODE (SET_SRC (XVECEXP (PATTERN (insn),0,0))) == MINUS
  8193. + && !avr32_const_ok_for_constraint_p (INTVAL (operands[2]), 'K', "Ks16"))) )
  8194. + return FALSE;
  8195. +
  8196. + return TRUE;
  8197. +}
  8198. +
  8199. +
  8200. +/* Function for getting an integer value from a const_int or const_double
  8201. + expression regardless of the HOST_WIDE_INT size. Each target cpu word
  8202. + will be put into the val array where the LSW will be stored at the lowest
  8203. + address and so forth. Assumes that const_expr is either a const_int or
  8204. + const_double. Only valid for modes which have sizes that are a multiple
  8205. + of the word size.
  8206. +*/
  8207. +void
  8208. +avr32_get_intval (enum machine_mode mode, rtx const_expr, HOST_WIDE_INT *val)
  8209. +{
  8210. + int words_in_mode = GET_MODE_SIZE (mode)/UNITS_PER_WORD;
  8211. + const int words_in_const_int = HOST_BITS_PER_WIDE_INT / BITS_PER_WORD;
  8212. +
  8213. + if ( GET_CODE(const_expr) == CONST_DOUBLE ){
  8214. + HOST_WIDE_INT hi = CONST_DOUBLE_HIGH(const_expr);
  8215. + HOST_WIDE_INT lo = CONST_DOUBLE_LOW(const_expr);
  8216. + /* Evaluate hi and lo values of const_double. */
  8217. + avr32_get_intval (mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0),
  8218. + GEN_INT (lo),
  8219. + &val[0]);
  8220. + avr32_get_intval (mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0),
  8221. + GEN_INT (hi),
  8222. + &val[words_in_const_int]);
  8223. + } else if ( GET_CODE(const_expr) == CONST_INT ){
  8224. + HOST_WIDE_INT value = INTVAL(const_expr);
  8225. + int word;
  8226. + for ( word = 0; (word < words_in_mode) && (word < words_in_const_int); word++ ){
  8227. + /* Shift word up to the MSW and shift down again to extract the
  8228. + word and sign-extend. */
  8229. + int lshift = (words_in_const_int - word - 1) * BITS_PER_WORD;
  8230. + int rshift = (words_in_const_int-1) * BITS_PER_WORD;
  8231. + val[word] = (value << lshift) >> rshift;
  8232. + }
  8233. +
  8234. + for ( ; word < words_in_mode; word++ ){
  8235. + /* Just put the sign bits in the remaining words. */
  8236. + val[word] = value < 0 ? -1 : 0;
  8237. + }
  8238. + }
  8239. +}
  8240. +
  8241. +
  8242. +void
  8243. +avr32_split_const_expr (enum machine_mode mode, enum machine_mode new_mode,
  8244. + rtx expr, rtx *split_expr)
  8245. +{
  8246. + int i, word;
  8247. + int words_in_intval = GET_MODE_SIZE (mode)/UNITS_PER_WORD;
  8248. + int words_in_split_values = GET_MODE_SIZE (new_mode)/UNITS_PER_WORD;
  8249. + const int words_in_const_int = HOST_BITS_PER_WIDE_INT / BITS_PER_WORD;
  8250. + HOST_WIDE_INT *val = alloca (words_in_intval * UNITS_PER_WORD);
  8251. +
  8252. + avr32_get_intval (mode, expr, val);
  8253. +
  8254. + for ( i=0; i < (words_in_intval/words_in_split_values); i++ )
  8255. + {
  8256. + HOST_WIDE_INT value_lo = 0, value_hi = 0;
  8257. + for ( word = 0; word < words_in_split_values; word++ )
  8258. + {
  8259. + if ( word >= words_in_const_int )
  8260. + value_hi |= ((val[i * words_in_split_values + word] &
  8261. + (((HOST_WIDE_INT)1 << BITS_PER_WORD)-1))
  8262. + << (BITS_PER_WORD * (word - words_in_const_int)));
  8263. + else
  8264. + value_lo |= ((val[i * words_in_split_values + word] &
  8265. + (((HOST_WIDE_INT)1 << BITS_PER_WORD)-1))
  8266. + << (BITS_PER_WORD * word));
  8267. + }
  8268. + split_expr[i] = immed_double_const(value_lo, value_hi, new_mode);
  8269. + }
  8270. +}
  8271. +
  8272. +
  8273. +/* Set up library functions to comply to AVR32 ABI */
  8274. +static void
  8275. +avr32_init_libfuncs (void)
  8276. +{
  8277. + /* Convert gcc run-time function names to AVR32 ABI names */
  8278. +
  8279. + /* Double-precision floating-point arithmetic. */
  8280. + set_optab_libfunc (neg_optab, DFmode, NULL);
  8281. +
  8282. + /* Double-precision comparisons. */
  8283. + set_optab_libfunc (eq_optab, DFmode, "__avr32_f64_cmp_eq");
  8284. + set_optab_libfunc (ne_optab, DFmode, NULL);
  8285. + set_optab_libfunc (lt_optab, DFmode, "__avr32_f64_cmp_lt");
  8286. + set_optab_libfunc (le_optab, DFmode, NULL);
  8287. + set_optab_libfunc (ge_optab, DFmode, "__avr32_f64_cmp_ge");
  8288. + set_optab_libfunc (gt_optab, DFmode, NULL);
  8289. +
  8290. + /* Single-precision floating-point arithmetic. */
  8291. + set_optab_libfunc (smul_optab, SFmode, "__avr32_f32_mul");
  8292. + set_optab_libfunc (neg_optab, SFmode, NULL);
  8293. +
  8294. + /* Single-precision comparisons. */
  8295. + set_optab_libfunc (eq_optab, SFmode, "__avr32_f32_cmp_eq");
  8296. + set_optab_libfunc (ne_optab, SFmode, NULL);
  8297. + set_optab_libfunc (lt_optab, SFmode, "__avr32_f32_cmp_lt");
  8298. + set_optab_libfunc (le_optab, SFmode, NULL);
  8299. + set_optab_libfunc (ge_optab, SFmode, "__avr32_f32_cmp_ge");
  8300. + set_optab_libfunc (gt_optab, SFmode, NULL);
  8301. +
  8302. + /* Floating-point to integer conversions. */
  8303. + set_conv_libfunc (sfix_optab, SImode, DFmode, "__avr32_f64_to_s32");
  8304. + set_conv_libfunc (ufix_optab, SImode, DFmode, "__avr32_f64_to_u32");
  8305. + set_conv_libfunc (sfix_optab, DImode, DFmode, "__avr32_f64_to_s64");
  8306. + set_conv_libfunc (ufix_optab, DImode, DFmode, "__avr32_f64_to_u64");
  8307. + set_conv_libfunc (sfix_optab, SImode, SFmode, "__avr32_f32_to_s32");
  8308. + set_conv_libfunc (ufix_optab, SImode, SFmode, "__avr32_f32_to_u32");
  8309. + set_conv_libfunc (sfix_optab, DImode, SFmode, "__avr32_f32_to_s64");
  8310. + set_conv_libfunc (ufix_optab, DImode, SFmode, "__avr32_f32_to_u64");
  8311. +
  8312. + /* Conversions between floating types. */
  8313. + set_conv_libfunc (trunc_optab, SFmode, DFmode, "__avr32_f64_to_f32");
  8314. + set_conv_libfunc (sext_optab, DFmode, SFmode, "__avr32_f32_to_f64");
  8315. +
  8316. + /* Integer to floating-point conversions. Table 8. */
  8317. + set_conv_libfunc (sfloat_optab, DFmode, SImode, "__avr32_s32_to_f64");
  8318. + set_conv_libfunc (sfloat_optab, DFmode, DImode, "__avr32_s64_to_f64");
  8319. + set_conv_libfunc (sfloat_optab, SFmode, SImode, "__avr32_s32_to_f32");
  8320. + set_conv_libfunc (sfloat_optab, SFmode, DImode, "__avr32_s64_to_f32");
  8321. + set_conv_libfunc (ufloat_optab, DFmode, SImode, "__avr32_u32_to_f64");
  8322. + set_conv_libfunc (ufloat_optab, SFmode, SImode, "__avr32_u32_to_f32");
  8323. + /* TODO: Add these to gcc library functions */
  8324. + //set_conv_libfunc (ufloat_optab, DFmode, DImode, NULL);
  8325. + //set_conv_libfunc (ufloat_optab, SFmode, DImode, NULL);
  8326. +
  8327. + /* Long long. Table 9. */
  8328. + set_optab_libfunc (smul_optab, DImode, "__avr32_mul64");
  8329. + set_optab_libfunc (sdiv_optab, DImode, "__avr32_sdiv64");
  8330. + set_optab_libfunc (udiv_optab, DImode, "__avr32_udiv64");
  8331. + set_optab_libfunc (smod_optab, DImode, "__avr32_smod64");
  8332. + set_optab_libfunc (umod_optab, DImode, "__avr32_umod64");
  8333. + set_optab_libfunc (ashl_optab, DImode, "__avr32_lsl64");
  8334. + set_optab_libfunc (lshr_optab, DImode, "__avr32_lsr64");
  8335. + set_optab_libfunc (ashr_optab, DImode, "__avr32_asr64");
  8336. +
  8337. + /* Floating point library functions which have fast versions. */
  8338. + if ( TARGET_FAST_FLOAT )
  8339. + {
  8340. + set_optab_libfunc (sdiv_optab, DFmode, "__avr32_f64_div_fast");
  8341. + set_optab_libfunc (smul_optab, DFmode, "__avr32_f64_mul_fast");
  8342. + set_optab_libfunc (add_optab, DFmode, "__avr32_f64_add_fast");
  8343. + set_optab_libfunc (sub_optab, DFmode, "__avr32_f64_sub_fast");
  8344. + set_optab_libfunc (add_optab, SFmode, "__avr32_f32_add_fast");
  8345. + set_optab_libfunc (sub_optab, SFmode, "__avr32_f32_sub_fast");
  8346. + set_optab_libfunc (sdiv_optab, SFmode, "__avr32_f32_div_fast");
  8347. + }
  8348. + else
  8349. + {
  8350. + set_optab_libfunc (sdiv_optab, DFmode, "__avr32_f64_div");
  8351. + set_optab_libfunc (smul_optab, DFmode, "__avr32_f64_mul");
  8352. + set_optab_libfunc (add_optab, DFmode, "__avr32_f64_add");
  8353. + set_optab_libfunc (sub_optab, DFmode, "__avr32_f64_sub");
  8354. + set_optab_libfunc (add_optab, SFmode, "__avr32_f32_add");
  8355. + set_optab_libfunc (sub_optab, SFmode, "__avr32_f32_sub");
  8356. + set_optab_libfunc (sdiv_optab, SFmode, "__avr32_f32_div");
  8357. + }
  8358. +}
  8359. +
  8360. +
  8361. +/* Record a flashvault declaration. */
  8362. +static void
  8363. +flashvault_decl_list_add (unsigned int vector_num, const char *name)
  8364. +{
  8365. + struct flashvault_decl_list *p;
  8366. +
  8367. + p = (struct flashvault_decl_list *)
  8368. + xmalloc (sizeof (struct flashvault_decl_list));
  8369. + p->next = flashvault_decl_list_head;
  8370. + p->name = name;
  8371. + p->vector_num = vector_num;
  8372. + flashvault_decl_list_head = p;
  8373. +}
  8374. +
  8375. +
  8376. +static void
  8377. +avr32_file_end (void)
  8378. +{
  8379. + struct flashvault_decl_list *p;
  8380. + unsigned int num_entries = 0;
  8381. +
  8382. + /* Check if a list of flashvault declarations exists. */
  8383. + if (flashvault_decl_list_head != NULL)
  8384. + {
  8385. + /* Calculate the number of entries in the table. */
  8386. + for (p = flashvault_decl_list_head; p != NULL; p = p->next)
  8387. + {
  8388. + num_entries++;
  8389. + }
  8390. +
  8391. + /* Generate the beginning of the flashvault data table. */
  8392. + fputs ("\t.global __fv_table\n"
  8393. + "\t.data\n"
  8394. + "\t.align 2\n"
  8395. + "\t.set .LFVTABLE, . + 0\n"
  8396. + "\t.type __fv_table, @object\n", asm_out_file);
  8397. + /* Each table entry is 8 bytes. */
  8398. + fprintf (asm_out_file, "\t.size __fv_table, %u\n", (num_entries * 8));
  8399. +
  8400. + fputs("__fv_table:\n", asm_out_file);
  8401. +
  8402. + for (p = flashvault_decl_list_head; p != NULL; p = p->next)
  8403. + {
  8404. + /* Output table entry. */
  8405. + fprintf (asm_out_file,
  8406. + "\t.align 2\n"
  8407. + "\t.int %u\n", p->vector_num);
  8408. + fprintf (asm_out_file,
  8409. + "\t.align 2\n"
  8410. + "\t.int %s\n", p->name);
  8411. + }
  8412. + }
  8413. +}
  8414. diff -Nur gcc-4.4.6.orig/gcc/config/avr32/avr32.h gcc-4.4.6/gcc/config/avr32/avr32.h
  8415. --- gcc-4.4.6.orig/gcc/config/avr32/avr32.h 1970-01-01 01:00:00.000000000 +0100
  8416. +++ gcc-4.4.6/gcc/config/avr32/avr32.h 2011-10-22 19:23:08.520581302 +0200
  8417. @@ -0,0 +1,3316 @@
  8418. +/*
  8419. + Definitions of target machine for AVR32.
  8420. + Copyright 2003,2004,2005,2006,2007,2008,2009,2010 Atmel Corporation.
  8421. +
  8422. + This file is part of GCC.
  8423. +
  8424. + This program is free software; you can redistribute it and/or modify
  8425. + it under the terms of the GNU General Public License as published by
  8426. + the Free Software Foundation; either version 2 of the License, or
  8427. + (at your option) any later version.
  8428. +
  8429. + This program is distributed in the hope that it will be useful,
  8430. + but WITHOUT ANY WARRANTY; without even the implied warranty of
  8431. + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  8432. + GNU General Public License for more details.
  8433. +
  8434. + You should have received a copy of the GNU General Public License
  8435. + along with this program; if not, write to the Free Software
  8436. + Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */
  8437. +
  8438. +#ifndef GCC_AVR32_H
  8439. +#define GCC_AVR32_H
  8440. +
  8441. +
  8442. +#ifndef OBJECT_FORMAT_ELF
  8443. +#error avr32.h included before elfos.h
  8444. +#endif
  8445. +
  8446. +#ifndef LOCAL_LABEL_PREFIX
  8447. +#define LOCAL_LABEL_PREFIX "."
  8448. +#endif
  8449. +
  8450. +#ifndef SUBTARGET_CPP_SPEC
  8451. +#define SUBTARGET_CPP_SPEC "-D__ELF__"
  8452. +#endif
  8453. +
  8454. +
  8455. +extern struct rtx_def *avr32_compare_op0;
  8456. +extern struct rtx_def *avr32_compare_op1;
  8457. +
  8458. +/* comparison type */
  8459. +enum avr32_cmp_type {
  8460. + CMP_QI, /* 1 byte ->char */
  8461. + CMP_HI, /* 2 byte->half word */
  8462. + CMP_SI, /* four byte->word*/
  8463. + CMP_DI, /* eight byte->double word */
  8464. + CMP_SF, /* single precision floats */
  8465. + CMP_MAX /* max comparison type */
  8466. +};
  8467. +
  8468. +extern enum avr32_cmp_type avr32_branch_type; /* type of branch to use */
  8469. +
  8470. +
  8471. +extern struct rtx_def *avr32_acc_cache;
  8472. +
  8473. +/* cache instruction op5 codes */
  8474. +#define AVR32_CACHE_INVALIDATE_ICACHE 1
  8475. +
  8476. +/*
  8477. +These bits describe the different types of function supported by the AVR32
  8478. +backend. They are exclusive, e.g. a function cannot be both a normal function
  8479. +and an interworked function. Knowing the type of a function is important for
  8480. +determining its prologue and epilogue sequences. Note value 7 is currently
  8481. +unassigned. Also note that the interrupt function types all have bit 2 set,
  8482. +so that they can be tested for easily. Note that 0 is deliberately chosen for
  8483. +AVR32_FT_UNKNOWN so that when the machine_function structure is initialized
  8484. +(to zero) func_type will default to unknown. This will force the first use of
  8485. +avr32_current_func_type to call avr32_compute_func_type.
  8486. +*/
  8487. +#define AVR32_FT_UNKNOWN 0 /* Type has not yet been determined. */
  8488. +#define AVR32_FT_NORMAL 1 /* Normal function. */
  8489. +#define AVR32_FT_ACALL 2 /* An acall function. */
  8490. +#define AVR32_FT_EXCEPTION_HANDLER 3 /* A C++ exception handler. */
  8491. +#define AVR32_FT_ISR_FULL 4 /* A fully shadowed interrupt mode. */
  8492. +#define AVR32_FT_ISR_HALF 5 /* A half shadowed interrupt mode. */
  8493. +#define AVR32_FT_ISR_NONE 6 /* No shadow registers. */
  8494. +
  8495. +#define AVR32_FT_TYPE_MASK ((1 << 3) - 1)
  8496. +
  8497. +/* In addition functions can have several type modifiers, outlined by these bit masks: */
  8498. +#define AVR32_FT_INTERRUPT (1 << 2) /* Note overlap with FT_ISR and above. */
  8499. +#define AVR32_FT_NAKED (1 << 3) /* No prologue or epilogue. */
  8500. +#define AVR32_FT_VOLATILE (1 << 4) /* Does not return. */
  8501. +#define AVR32_FT_NESTED (1 << 5) /* Embedded inside another func. */
  8502. +#define AVR32_FT_FLASHVAULT (1 << 6) /* Flashvault function call. */
  8503. +#define AVR32_FT_FLASHVAULT_IMPL (1 << 7) /* Function definition in FlashVault. */
  8504. +
  8505. +
  8506. +/* Some macros to test these flags. */
  8507. +#define AVR32_FUNC_TYPE(t) (t & AVR32_FT_TYPE_MASK)
  8508. +#define IS_INTERRUPT(t) (t & AVR32_FT_INTERRUPT)
  8509. +#define IS_NAKED(t) (t & AVR32_FT_NAKED)
  8510. +#define IS_VOLATILE(t) (t & AVR32_FT_VOLATILE)
  8511. +#define IS_NESTED(t) (t & AVR32_FT_NESTED)
  8512. +#define IS_FLASHVAULT(t) (t & AVR32_FT_FLASHVAULT)
  8513. +#define IS_FLASHVAULT_IMPL(t) (t & AVR32_FT_FLASHVAULT_IMPL)
  8514. +
  8515. +#define SYMBOL_FLAG_RMW_ADDR_SHIFT SYMBOL_FLAG_MACH_DEP_SHIFT
  8516. +#define SYMBOL_REF_RMW_ADDR(RTX) \
  8517. + ((SYMBOL_REF_FLAGS (RTX) & (1 << SYMBOL_FLAG_RMW_ADDR_SHIFT)) != 0)
  8518. +
  8519. +
  8520. +typedef struct minipool_labels
  8521. +GTY ((chain_next ("%h.next"), chain_prev ("%h.prev")))
  8522. +{
  8523. + rtx label;
  8524. + struct minipool_labels *prev;
  8525. + struct minipool_labels *next;
  8526. +} minipool_labels;
  8527. +
  8528. +/* A C structure for machine-specific, per-function data.
  8529. + This is added to the cfun structure. */
  8530. +
  8531. +typedef struct machine_function
  8532. +GTY (())
  8533. +{
  8534. + /* Records the type of the current function. */
  8535. + unsigned long func_type;
  8536. + /* List of minipool labels, use for checking if code label is valid in a
  8537. + memory expression */
  8538. + minipool_labels *minipool_label_head;
  8539. + minipool_labels *minipool_label_tail;
  8540. + int ifcvt_after_reload;
  8541. +} machine_function;
  8542. +
  8543. +/* Initialize data used by insn expanders. This is called from insn_emit,
  8544. + once for every function before code is generated. */
  8545. +#define INIT_EXPANDERS avr32_init_expanders ()
  8546. +
  8547. +/******************************************************************************
  8548. + * SPECS
  8549. + *****************************************************************************/
  8550. +
  8551. +#ifndef ASM_SPEC
  8552. +#define ASM_SPEC "%{fpic:--pic} %{mrelax|O*:%{mno-relax|O0|O1: ;:--linkrelax}} %{march=ucr2nomul:-march=ucr2;:%{march=*:-march=%*}} %{mpart=uc3a3revd:-mpart=uc3a3256s;:%{mpart=*:-mpart=%*}}"
  8553. +#endif
  8554. +
  8555. +#ifndef MULTILIB_DEFAULTS
  8556. +#define MULTILIB_DEFAULTS { "march=ap", "" }
  8557. +#endif
  8558. +
  8559. +/******************************************************************************
  8560. + * Run-time Target Specification
  8561. + *****************************************************************************/
  8562. +#ifndef TARGET_VERSION
  8563. +#define TARGET_VERSION fprintf(stderr, " (AVR32, GNU assembler syntax)");
  8564. +#endif
  8565. +
  8566. +
  8567. +/* Part types. Keep this in sync with the order of avr32_part_types in avr32.c*/
  8568. +enum part_type
  8569. +{
  8570. + PART_TYPE_AVR32_NONE,
  8571. + PART_TYPE_AVR32_AP7000,
  8572. + PART_TYPE_AVR32_AP7001,
  8573. + PART_TYPE_AVR32_AP7002,
  8574. + PART_TYPE_AVR32_AP7200,
  8575. + PART_TYPE_AVR32_UC3A0128,
  8576. + PART_TYPE_AVR32_UC3A0256,
  8577. + PART_TYPE_AVR32_UC3A0512,
  8578. + PART_TYPE_AVR32_UC3A0512ES,
  8579. + PART_TYPE_AVR32_UC3A1128,
  8580. + PART_TYPE_AVR32_UC3A1256,
  8581. + PART_TYPE_AVR32_UC3A1512,
  8582. + PART_TYPE_AVR32_UC3A1512ES,
  8583. + PART_TYPE_AVR32_UC3A3REVD,
  8584. + PART_TYPE_AVR32_UC3A364,
  8585. + PART_TYPE_AVR32_UC3A364S,
  8586. + PART_TYPE_AVR32_UC3A3128,
  8587. + PART_TYPE_AVR32_UC3A3128S,
  8588. + PART_TYPE_AVR32_UC3A3256,
  8589. + PART_TYPE_AVR32_UC3A3256S,
  8590. + PART_TYPE_AVR32_UC3A464,
  8591. + PART_TYPE_AVR32_UC3A464S,
  8592. + PART_TYPE_AVR32_UC3A4128,
  8593. + PART_TYPE_AVR32_UC3A4128S,
  8594. + PART_TYPE_AVR32_UC3A4256,
  8595. + PART_TYPE_AVR32_UC3A4256S,
  8596. + PART_TYPE_AVR32_UC3B064,
  8597. + PART_TYPE_AVR32_UC3B0128,
  8598. + PART_TYPE_AVR32_UC3B0256,
  8599. + PART_TYPE_AVR32_UC3B0256ES,
  8600. + PART_TYPE_AVR32_UC3B0512,
  8601. + PART_TYPE_AVR32_UC3B0512REVC,
  8602. + PART_TYPE_AVR32_UC3B164,
  8603. + PART_TYPE_AVR32_UC3B1128,
  8604. + PART_TYPE_AVR32_UC3B1256,
  8605. + PART_TYPE_AVR32_UC3B1256ES,
  8606. + PART_TYPE_AVR32_UC3B1512,
  8607. + PART_TYPE_AVR32_UC3B1512REVC,
  8608. + PART_TYPE_AVR32_UC64D3,
  8609. + PART_TYPE_AVR32_UC128D3,
  8610. + PART_TYPE_AVR32_UC64D4,
  8611. + PART_TYPE_AVR32_UC128D4,
  8612. + PART_TYPE_AVR32_UC3C0512CREVC,
  8613. + PART_TYPE_AVR32_UC3C1512CREVC,
  8614. + PART_TYPE_AVR32_UC3C2512CREVC,
  8615. + PART_TYPE_AVR32_UC3L0256,
  8616. + PART_TYPE_AVR32_UC3L0128,
  8617. + PART_TYPE_AVR32_UC3L064,
  8618. + PART_TYPE_AVR32_UC3L032,
  8619. + PART_TYPE_AVR32_UC3L016,
  8620. + PART_TYPE_AVR32_UC3L064REVB,
  8621. + PART_TYPE_AVR32_UC64L3U,
  8622. + PART_TYPE_AVR32_UC128L3U,
  8623. + PART_TYPE_AVR32_UC256L3U,
  8624. + PART_TYPE_AVR32_UC64L4U,
  8625. + PART_TYPE_AVR32_UC128L4U,
  8626. + PART_TYPE_AVR32_UC256L4U,
  8627. + PART_TYPE_AVR32_UC3C064C,
  8628. + PART_TYPE_AVR32_UC3C0128C,
  8629. + PART_TYPE_AVR32_UC3C0256C,
  8630. + PART_TYPE_AVR32_UC3C0512C,
  8631. + PART_TYPE_AVR32_UC3C164C,
  8632. + PART_TYPE_AVR32_UC3C1128C,
  8633. + PART_TYPE_AVR32_UC3C1256C,
  8634. + PART_TYPE_AVR32_UC3C1512C,
  8635. + PART_TYPE_AVR32_UC3C264C,
  8636. + PART_TYPE_AVR32_UC3C2128C,
  8637. + PART_TYPE_AVR32_UC3C2256C,
  8638. + PART_TYPE_AVR32_UC3C2512C,
  8639. + PART_TYPE_AVR32_MXT768E
  8640. +};
  8641. +
  8642. +/* Microarchitectures. */
  8643. +enum microarchitecture_type
  8644. +{
  8645. + UARCH_TYPE_AVR32A,
  8646. + UARCH_TYPE_AVR32B,
  8647. + UARCH_TYPE_NONE
  8648. +};
  8649. +
  8650. +/* Architectures types which specifies the pipeline.
  8651. + Keep this in sync with avr32_arch_types in avr32.c
  8652. + and the pipeline attribute in avr32.md */
  8653. +enum architecture_type
  8654. +{
  8655. + ARCH_TYPE_AVR32_AP,
  8656. + ARCH_TYPE_AVR32_UCR1,
  8657. + ARCH_TYPE_AVR32_UCR2,
  8658. + ARCH_TYPE_AVR32_UCR2NOMUL,
  8659. + ARCH_TYPE_AVR32_UCR3,
  8660. + ARCH_TYPE_AVR32_UCR3FP,
  8661. + ARCH_TYPE_AVR32_NONE
  8662. +};
  8663. +
  8664. +/* Flag specifying if the cpu has support for DSP instructions.*/
  8665. +#define FLAG_AVR32_HAS_DSP (1 << 0)
  8666. +/* Flag specifying if the cpu has support for Read-Modify-Write
  8667. + instructions.*/
  8668. +#define FLAG_AVR32_HAS_RMW (1 << 1)
  8669. +/* Flag specifying if the cpu has support for SIMD instructions. */
  8670. +#define FLAG_AVR32_HAS_SIMD (1 << 2)
  8671. +/* Flag specifying if the cpu has support for unaligned memory word access. */
  8672. +#define FLAG_AVR32_HAS_UNALIGNED_WORD (1 << 3)
  8673. +/* Flag specifying if the cpu has support for branch prediction. */
  8674. +#define FLAG_AVR32_HAS_BRANCH_PRED (1 << 4)
  8675. +/* Flag specifying if the cpu has support for a return stack. */
  8676. +#define FLAG_AVR32_HAS_RETURN_STACK (1 << 5)
  8677. +/* Flag specifying if the cpu has caches. */
  8678. +#define FLAG_AVR32_HAS_CACHES (1 << 6)
  8679. +/* Flag specifying if the cpu has support for v2 insns. */
  8680. +#define FLAG_AVR32_HAS_V2_INSNS (1 << 7)
  8681. +/* Flag specifying that the cpu has buggy mul insns. */
  8682. +#define FLAG_AVR32_HAS_NO_MUL_INSNS (1 << 8)
  8683. +/* Flag specifying that the device has FPU instructions according
  8684. + to AVR32002 specifications*/
  8685. +#define FLAG_AVR32_HAS_FPU (1 << 9)
  8686. +
  8687. +/* Structure for holding information about different avr32 CPUs/parts */
  8688. +struct part_type_s
  8689. +{
  8690. + const char *const name;
  8691. + enum part_type part_type;
  8692. + enum architecture_type arch_type;
  8693. + /* Must lie outside user's namespace. NULL == no macro. */
  8694. + const char *const macro;
  8695. +};
  8696. +
  8697. +/* Structure for holding information about different avr32 pipeline
  8698. + architectures. */
  8699. +struct arch_type_s
  8700. +{
  8701. + const char *const name;
  8702. + enum architecture_type arch_type;
  8703. + enum microarchitecture_type uarch_type;
  8704. + const unsigned long feature_flags;
  8705. + /* Must lie outside user's namespace. NULL == no macro. */
  8706. + const char *const macro;
  8707. +};
  8708. +
  8709. +extern const struct part_type_s *avr32_part;
  8710. +extern const struct arch_type_s *avr32_arch;
  8711. +
  8712. +#define TARGET_SIMD (avr32_arch->feature_flags & FLAG_AVR32_HAS_SIMD)
  8713. +#define TARGET_DSP (avr32_arch->feature_flags & FLAG_AVR32_HAS_DSP)
  8714. +#define TARGET_RMW (avr32_arch->feature_flags & FLAG_AVR32_HAS_RMW)
  8715. +#define TARGET_UNALIGNED_WORD (avr32_arch->feature_flags & FLAG_AVR32_HAS_UNALIGNED_WORD)
  8716. +#define TARGET_BRANCH_PRED (avr32_arch->feature_flags & FLAG_AVR32_HAS_BRANCH_PRED)
  8717. +#define TARGET_RETURN_STACK (avr32_arch->feature_flags & FLAG_AVR32_HAS_RETURN_STACK)
  8718. +#define TARGET_V2_INSNS (avr32_arch->feature_flags & FLAG_AVR32_HAS_V2_INSNS)
  8719. +#define TARGET_CACHES (avr32_arch->feature_flags & FLAG_AVR32_HAS_CACHES)
  8720. +#define TARGET_NO_MUL_INSNS (avr32_arch->feature_flags & FLAG_AVR32_HAS_NO_MUL_INSNS)
  8721. +#define TARGET_ARCH_AP (avr32_arch->arch_type == ARCH_TYPE_AVR32_AP)
  8722. +#define TARGET_ARCH_UCR1 (avr32_arch->arch_type == ARCH_TYPE_AVR32_UCR1)
  8723. +#define TARGET_ARCH_UCR2 (avr32_arch->arch_type == ARCH_TYPE_AVR32_UCR2)
  8724. +#define TARGET_ARCH_UC (TARGET_ARCH_UCR1 || TARGET_ARCH_UCR2)
  8725. +#define TARGET_UARCH_AVR32A (avr32_arch->uarch_type == UARCH_TYPE_AVR32A)
  8726. +#define TARGET_UARCH_AVR32B (avr32_arch->uarch_type == UARCH_TYPE_AVR32B)
  8727. +#define TARGET_ARCH_FPU (avr32_arch->feature_flags & FLAG_AVR32_HAS_FPU)
  8728. +
  8729. +#define CAN_DEBUG_WITHOUT_FP
  8730. +
  8731. +
  8732. +
  8733. +
  8734. +/******************************************************************************
  8735. + * Storage Layout
  8736. + *****************************************************************************/
  8737. +
  8738. +/*
  8739. +Define this macro to have the value 1 if the most significant bit in a
  8740. +byte has the lowest number; otherwise define it to have the value zero.
  8741. +This means that bit-field instructions count from the most significant
  8742. +bit. If the machine has no bit-field instructions, then this must still
  8743. +be defined, but it doesn't matter which value it is defined to. This
  8744. +macro need not be a constant.
  8745. +
  8746. +This macro does not affect the way structure fields are packed into
  8747. +bytes or words; that is controlled by BYTES_BIG_ENDIAN.
  8748. +*/
  8749. +#define BITS_BIG_ENDIAN 0
  8750. +
  8751. +/*
  8752. +Define this macro to have the value 1 if the most significant byte in a
  8753. +word has the lowest number. This macro need not be a constant.
  8754. +*/
  8755. +/*
  8756. + Data is stored in a big-endian way.
  8757. +*/
  8758. +#define BYTES_BIG_ENDIAN 1
  8759. +
  8760. +/*
  8761. +Define this macro to have the value 1 if, in a multiword object, the
  8762. +most significant word has the lowest number. This applies to both
  8763. +memory locations and registers; GCC fundamentally assumes that the
  8764. +order of words in memory is the same as the order in registers. This
  8765. +macro need not be a constant.
  8766. +*/
  8767. +/*
  8768. + Data is stored in a big-endian way.
  8769. +*/
  8770. +#define WORDS_BIG_ENDIAN 1
  8771. +
  8772. +/*
  8773. +Define this macro if WORDS_BIG_ENDIAN is not constant. This must be a
  8774. +constant value with the same meaning as WORDS_BIG_ENDIAN, which will be
  8775. +used only when compiling libgcc2.c. Typically the value will be set
  8776. +based on preprocessor defines.
  8777. +*/
  8778. +#define LIBGCC2_WORDS_BIG_ENDIAN WORDS_BIG_ENDIAN
  8779. +
  8780. +/*
  8781. +Define this macro to have the value 1 if DFmode, XFmode or
  8782. +TFmode floating point numbers are stored in memory with the word
  8783. +containing the sign bit at the lowest address; otherwise define it to
  8784. +have the value 0. This macro need not be a constant.
  8785. +
  8786. +You need not define this macro if the ordering is the same as for
  8787. +multi-word integers.
  8788. +*/
  8789. +/* #define FLOAT_WORDS_BIG_ENDIAN 1 */
  8790. +
  8791. +/*
  8792. +Define this macro to be the number of bits in an addressable storage
  8793. +unit (byte); normally 8.
  8794. +*/
  8795. +#define BITS_PER_UNIT 8
  8796. +
  8797. +/*
  8798. +Number of bits in a word; normally 32.
  8799. +*/
  8800. +#define BITS_PER_WORD 32
  8801. +
  8802. +/*
  8803. +Maximum number of bits in a word. If this is undefined, the default is
  8804. +BITS_PER_WORD. Otherwise, it is the constant value that is the
  8805. +largest value that BITS_PER_WORD can have at run-time.
  8806. +*/
  8807. +/* MAX_BITS_PER_WORD not defined*/
  8808. +
  8809. +/*
  8810. +Number of storage units in a word; normally 4.
  8811. +*/
  8812. +#define UNITS_PER_WORD 4
  8813. +
  8814. +/*
  8815. +Minimum number of units in a word. If this is undefined, the default is
  8816. +UNITS_PER_WORD. Otherwise, it is the constant value that is the
  8817. +smallest value that UNITS_PER_WORD can have at run-time.
  8818. +*/
  8819. +/* MIN_UNITS_PER_WORD not defined */
  8820. +
  8821. +/*
  8822. +Width of a pointer, in bits. You must specify a value no wider than the
  8823. +width of Pmode. If it is not equal to the width of Pmode,
  8824. +you must define POINTERS_EXTEND_UNSIGNED.
  8825. +*/
  8826. +#define POINTER_SIZE 32
  8827. +
  8828. +/*
  8829. +A C expression whose value is greater than zero if pointers that need to be
  8830. +extended from being POINTER_SIZE bits wide to Pmode are to
  8831. +be zero-extended and zero if they are to be sign-extended. If the value
  8832. +is less than zero then there must be a "ptr_extend" instruction that
  8833. +extends a pointer from POINTER_SIZE to Pmode.
  8834. +
  8835. +You need not define this macro if the POINTER_SIZE is equal
  8836. +to the width of Pmode.
  8837. +*/
  8838. +/* #define POINTERS_EXTEND_UNSIGNED */
  8839. +
  8840. +/*
  8841. +A Macro to update M and UNSIGNEDP when an object whose type
  8842. +is TYPE and which has the specified mode and signedness is to be
  8843. +stored in a register. This macro is only called when TYPE is a
  8844. +scalar type.
  8845. +
  8846. +On most RISC machines, which only have operations that operate on a full
  8847. +register, define this macro to set M to word_mode if
  8848. +M is an integer mode narrower than BITS_PER_WORD. In most
  8849. +cases, only integer modes should be widened because wider-precision
  8850. +floating-point operations are usually more expensive than their narrower
  8851. +counterparts.
  8852. +
  8853. +For most machines, the macro definition does not change UNSIGNEDP.
  8854. +However, some machines, have instructions that preferentially handle
  8855. +either signed or unsigned quantities of certain modes. For example, on
  8856. +the DEC Alpha, 32-bit loads from memory and 32-bit add instructions
  8857. +sign-extend the result to 64 bits. On such machines, set
  8858. +UNSIGNEDP according to which kind of extension is more efficient.
  8859. +
  8860. +Do not define this macro if it would never modify M.
  8861. +*/
  8862. +#define PROMOTE_MODE(M, UNSIGNEDP, TYPE) \
  8863. + { \
  8864. + if (!AGGREGATE_TYPE_P (TYPE) \
  8865. + && GET_MODE_CLASS (mode) == MODE_INT \
  8866. + && GET_MODE_SIZE (mode) < 4) \
  8867. + { \
  8868. + if (M == QImode) \
  8869. + (UNSIGNEDP) = 1; \
  8870. + else if (M == HImode) \
  8871. + (UNSIGNEDP) = 0; \
  8872. + (M) = SImode; \
  8873. + } \
  8874. + }
  8875. +
  8876. +#define PROMOTE_FUNCTION_MODE(M, UNSIGNEDP, TYPE) \
  8877. + PROMOTE_MODE(M, UNSIGNEDP, TYPE)
  8878. +
  8879. +/* Define if operations between registers always perform the operation
  8880. + on the full register even if a narrower mode is specified. */
  8881. +#define WORD_REGISTER_OPERATIONS
  8882. +
  8883. +/* Define if loading in MODE, an integral mode narrower than BITS_PER_WORD
  8884. + will either zero-extend or sign-extend. The value of this macro should
  8885. + be the code that says which one of the two operations is implicitly
  8886. + done, UNKNOWN if not known. */
  8887. +#define LOAD_EXTEND_OP(MODE) \
  8888. + (((MODE) == QImode) ? ZERO_EXTEND \
  8889. + : ((MODE) == HImode) ? SIGN_EXTEND : UNKNOWN)
  8890. +
  8891. +
  8892. +/*
  8893. +Normal alignment required for function parameters on the stack, in
  8894. +bits. All stack parameters receive at least this much alignment
  8895. +regardless of data type. On most machines, this is the same as the
  8896. +size of an integer.
  8897. +*/
  8898. +#define PARM_BOUNDARY 32
  8899. +
  8900. +/*
  8901. +Define this macro to the minimum alignment enforced by hardware for the
  8902. +stack pointer on this machine. The definition is a C expression for the
  8903. +desired alignment (measured in bits). This value is used as a default
  8904. +if PREFERRED_STACK_BOUNDARY is not defined. On most machines,
  8905. +this should be the same as PARM_BOUNDARY.
  8906. +*/
  8907. +#define STACK_BOUNDARY 32
  8908. +
  8909. +/*
  8910. +Define this macro if you wish to preserve a certain alignment for the
  8911. +stack pointer, greater than what the hardware enforces. The definition
  8912. +is a C expression for the desired alignment (measured in bits). This
  8913. +macro must evaluate to a value equal to or larger than
  8914. +STACK_BOUNDARY.
  8915. +*/
  8916. +#define PREFERRED_STACK_BOUNDARY (TARGET_FORCE_DOUBLE_ALIGN ? 64 : 32 )
  8917. +
  8918. +/*
  8919. +Alignment required for a function entry point, in bits.
  8920. +*/
  8921. +#define FUNCTION_BOUNDARY 16
  8922. +
  8923. +/*
  8924. +Biggest alignment that any data type can require on this machine, in bits.
  8925. +*/
  8926. +#define BIGGEST_ALIGNMENT (TARGET_FORCE_DOUBLE_ALIGN ? 64 : 32 )
  8927. +
  8928. +/*
  8929. +If defined, the smallest alignment, in bits, that can be given to an
  8930. +object that can be referenced in one operation, without disturbing any
  8931. +nearby object. Normally, this is BITS_PER_UNIT, but may be larger
  8932. +on machines that don't have byte or half-word store operations.
  8933. +*/
  8934. +#define MINIMUM_ATOMIC_ALIGNMENT BITS_PER_UNIT
  8935. +
  8936. +
  8937. +/*
  8938. +An integer expression for the size in bits of the largest integer machine mode that
  8939. +should actually be used. All integer machine modes of this size or smaller can be
  8940. +used for structures and unions with the appropriate sizes. If this macro is undefined,
  8941. +GET_MODE_BITSIZE (DImode) is assumed.*/
  8942. +#define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
  8943. +
  8944. +
  8945. +/*
  8946. +If defined, a C expression to compute the alignment given to a constant
  8947. +that is being placed in memory. CONSTANT is the constant and
  8948. +BASIC_ALIGN is the alignment that the object would ordinarily
  8949. +have. The value of this macro is used instead of that alignment to
  8950. +align the object.
  8951. +
  8952. +If this macro is not defined, then BASIC_ALIGN is used.
  8953. +
  8954. +The typical use of this macro is to increase alignment for string
  8955. +constants to be word aligned so that strcpy calls that copy
  8956. +constants can be done inline.
  8957. +*/
  8958. +#define CONSTANT_ALIGNMENT(CONSTANT, BASIC_ALIGN) \
  8959. + ((TREE_CODE(CONSTANT) == STRING_CST) ? BITS_PER_WORD : BASIC_ALIGN)
  8960. +
  8961. +/* Try to align string to a word. */
  8962. +#define DATA_ALIGNMENT(TYPE, ALIGN) \
  8963. + ({(TREE_CODE (TYPE) == ARRAY_TYPE \
  8964. + && TYPE_MODE (TREE_TYPE (TYPE)) == QImode \
  8965. + && (ALIGN) < BITS_PER_WORD ? BITS_PER_WORD : (ALIGN));})
  8966. +
  8967. +/* Try to align local store strings to a word. */
  8968. +#define LOCAL_ALIGNMENT(TYPE, ALIGN) \
  8969. + ({(TREE_CODE (TYPE) == ARRAY_TYPE \
  8970. + && TYPE_MODE (TREE_TYPE (TYPE)) == QImode \
  8971. + && (ALIGN) < BITS_PER_WORD ? BITS_PER_WORD : (ALIGN));})
  8972. +
  8973. +/*
  8974. +Define this macro to be the value 1 if instructions will fail to work
  8975. +if given data not on the nominal alignment. If instructions will merely
  8976. +go slower in that case, define this macro as 0.
  8977. +*/
  8978. +#define STRICT_ALIGNMENT 1
  8979. +
  8980. +/*
  8981. +Define this if you wish to imitate the way many other C compilers handle
  8982. +alignment of bit-fields and the structures that contain them.
  8983. +
  8984. +The behavior is that the type written for a bit-field (int,
  8985. +short, or other integer type) imposes an alignment for the
  8986. +entire structure, as if the structure really did contain an ordinary
  8987. +field of that type. In addition, the bit-field is placed within the
  8988. +structure so that it would fit within such a field, not crossing a
  8989. +boundary for it.
  8990. +
  8991. +Thus, on most machines, a bit-field whose type is written as int
  8992. +would not cross a four-byte boundary, and would force four-byte
  8993. +alignment for the whole structure. (The alignment used may not be four
  8994. +bytes; it is controlled by the other alignment parameters.)
  8995. +
  8996. +If the macro is defined, its definition should be a C expression;
  8997. +a nonzero value for the expression enables this behavior.
  8998. +
  8999. +Note that if this macro is not defined, or its value is zero, some
  9000. +bit-fields may cross more than one alignment boundary. The compiler can
  9001. +support such references if there are insv, extv, and
  9002. +extzv insns that can directly reference memory.
  9003. +
  9004. +The other known way of making bit-fields work is to define
  9005. +STRUCTURE_SIZE_BOUNDARY as large as BIGGEST_ALIGNMENT.
  9006. +Then every structure can be accessed with fullwords.
  9007. +
  9008. +Unless the machine has bit-field instructions or you define
  9009. +STRUCTURE_SIZE_BOUNDARY that way, you must define
  9010. +PCC_BITFIELD_TYPE_MATTERS to have a nonzero value.
  9011. +
  9012. +If your aim is to make GCC use the same conventions for laying out
  9013. +bit-fields as are used by another compiler, here is how to investigate
  9014. +what the other compiler does. Compile and run this program:
  9015. +
  9016. +struct foo1
  9017. +{
  9018. + char x;
  9019. + char :0;
  9020. + char y;
  9021. +};
  9022. +
  9023. +struct foo2
  9024. +{
  9025. + char x;
  9026. + int :0;
  9027. + char y;
  9028. +};
  9029. +
  9030. +main ()
  9031. +{
  9032. + printf ("Size of foo1 is %d\n",
  9033. + sizeof (struct foo1));
  9034. + printf ("Size of foo2 is %d\n",
  9035. + sizeof (struct foo2));
  9036. + exit (0);
  9037. +}
  9038. +
  9039. +If this prints 2 and 5, then the compiler's behavior is what you would
  9040. +get from PCC_BITFIELD_TYPE_MATTERS.
  9041. +*/
  9042. +#define PCC_BITFIELD_TYPE_MATTERS 1
  9043. +
  9044. +
  9045. +/******************************************************************************
  9046. + * Layout of Source Language Data Types
  9047. + *****************************************************************************/
  9048. +
  9049. +/*
  9050. +A C expression for the size in bits of the type int on the
  9051. +target machine. If you don't define this, the default is one word.
  9052. +*/
  9053. +#define INT_TYPE_SIZE 32
  9054. +
  9055. +/*
  9056. +A C expression for the size in bits of the type short on the
  9057. +target machine. If you don't define this, the default is half a word. (If
  9058. +this would be less than one storage unit, it is rounded up to one unit.)
  9059. +*/
  9060. +#define SHORT_TYPE_SIZE 16
  9061. +
  9062. +/*
  9063. +A C expression for the size in bits of the type long on the
  9064. +target machine. If you don't define this, the default is one word.
  9065. +*/
  9066. +#define LONG_TYPE_SIZE 32
  9067. +
  9068. +
  9069. +/*
  9070. +A C expression for the size in bits of the type long long on the
  9071. +target machine. If you don't define this, the default is two
  9072. +words. If you want to support GNU Ada on your machine, the value of this
  9073. +macro must be at least 64.
  9074. +*/
  9075. +#define LONG_LONG_TYPE_SIZE 64
  9076. +
  9077. +/*
  9078. +A C expression for the size in bits of the type char on the
  9079. +target machine. If you don't define this, the default is
  9080. +BITS_PER_UNIT.
  9081. +*/
  9082. +#define CHAR_TYPE_SIZE 8
  9083. +
  9084. +
  9085. +/*
  9086. +A C expression for the size in bits of the C++ type bool and
  9087. +C99 type _Bool on the target machine. If you don't define
  9088. +this, and you probably shouldn't, the default is CHAR_TYPE_SIZE.
  9089. +*/
  9090. +#define BOOL_TYPE_SIZE 8
  9091. +
  9092. +
  9093. +/*
  9094. +An expression whose value is 1 or 0, according to whether the type
  9095. +char should be signed or unsigned by default. The user can
  9096. +always override this default with the options -fsigned-char
  9097. +and -funsigned-char.
  9098. +*/
  9099. +/* We are using unsigned char */
  9100. +#define DEFAULT_SIGNED_CHAR 0
  9101. +
  9102. +
  9103. +/*
  9104. +A C expression for a string describing the name of the data type to use
  9105. +for size values. The typedef name size_t is defined using the
  9106. +contents of the string.
  9107. +
  9108. +The string can contain more than one keyword. If so, separate them with
  9109. +spaces, and write first any length keyword, then unsigned if
  9110. +appropriate, and finally int. The string must exactly match one
  9111. +of the data type names defined in the function
  9112. +init_decl_processing in the file c-decl.c. You may not
  9113. +omit int or change the order - that would cause the compiler to
  9114. +crash on startup.
  9115. +
  9116. +If you don't define this macro, the default is "long unsigned int".
  9117. +*/
  9118. +#define SIZE_TYPE "long unsigned int"
  9119. +
  9120. +/*
  9121. +A C expression for a string describing the name of the data type to use
  9122. +for the result of subtracting two pointers. The typedef name
  9123. +ptrdiff_t is defined using the contents of the string. See
  9124. +SIZE_TYPE above for more information.
  9125. +
  9126. +If you don't define this macro, the default is "long int".
  9127. +*/
  9128. +#define PTRDIFF_TYPE "long int"
  9129. +
  9130. +
  9131. +/*
  9132. +A C expression for the size in bits of the data type for wide
  9133. +characters. This is used in cpp, which cannot make use of
  9134. +WCHAR_TYPE.
  9135. +*/
  9136. +#define WCHAR_TYPE_SIZE 32
  9137. +
  9138. +
  9139. +/*
  9140. +A C expression for a string describing the name of the data type to
  9141. +use for wide characters passed to printf and returned from
  9142. +getwc. The typedef name wint_t is defined using the
  9143. +contents of the string. See SIZE_TYPE above for more
  9144. +information.
  9145. +
  9146. +If you don't define this macro, the default is "unsigned int".
  9147. +*/
  9148. +#define WINT_TYPE "unsigned int"
  9149. +
  9150. +/*
  9151. +A C expression for a string describing the name of the data type that
  9152. +can represent any value of any standard or extended signed integer type.
  9153. +The typedef name intmax_t is defined using the contents of the
  9154. +string. See SIZE_TYPE above for more information.
  9155. +
  9156. +If you don't define this macro, the default is the first of
  9157. +"int", "long int", or "long long int" that has as
  9158. +much precision as long long int.
  9159. +*/
  9160. +#define INTMAX_TYPE "long long int"
  9161. +
  9162. +/*
  9163. +A C expression for a string describing the name of the data type that
  9164. +can represent any value of any standard or extended unsigned integer
  9165. +type. The typedef name uintmax_t is defined using the contents
  9166. +of the string. See SIZE_TYPE above for more information.
  9167. +
  9168. +If you don't define this macro, the default is the first of
  9169. +"unsigned int", "long unsigned int", or "long long unsigned int"
  9170. +that has as much precision as long long unsigned int.
  9171. +*/
  9172. +#define UINTMAX_TYPE "long long unsigned int"
  9173. +
  9174. +
  9175. +/******************************************************************************
  9176. + * Register Usage
  9177. + *****************************************************************************/
  9178. +
  9179. +/* Convert from gcc internal register number to register number
  9180. + used in assembly code */
  9181. +#define ASM_REGNUM(reg) (LAST_REGNUM - (reg))
  9182. +
  9183. +/* Convert between register number used in assembly to gcc
  9184. + internal register number */
  9185. +#define INTERNAL_REGNUM(reg) (LAST_REGNUM - (reg))
  9186. +
  9187. +/** Basic Characteristics of Registers **/
  9188. +
  9189. +/*
  9190. +Number of hardware registers known to the compiler. They receive
  9191. +numbers 0 through FIRST_PSEUDO_REGISTER-1; thus, the first
  9192. +pseudo register's number really is assigned the number
  9193. +FIRST_PSEUDO_REGISTER.
  9194. +*/
  9195. +#define FIRST_PSEUDO_REGISTER (LAST_REGNUM + 1)
  9196. +
  9197. +#define FIRST_REGNUM 0
  9198. +#define LAST_REGNUM 15
  9199. +
  9200. +/*
  9201. +An initializer that says which registers are used for fixed purposes
  9202. +all throughout the compiled code and are therefore not available for
  9203. +general allocation. These would include the stack pointer, the frame
  9204. +pointer (except on machines where that can be used as a general
  9205. +register when no frame pointer is needed), the program counter on
  9206. +machines where that is considered one of the addressable registers,
  9207. +and any other numbered register with a standard use.
  9208. +
  9209. +This information is expressed as a sequence of numbers, separated by
  9210. +commas and surrounded by braces. The nth number is 1 if
  9211. +register n is fixed, 0 otherwise.
  9212. +
  9213. +The table initialized from this macro, and the table initialized by
  9214. +the following one, may be overridden at run time either automatically,
  9215. +by the actions of the macro CONDITIONAL_REGISTER_USAGE, or by
  9216. +the user with the command options -ffixed-[reg],
  9217. +-fcall-used-[reg] and -fcall-saved-[reg].
  9218. +*/
  9219. +
  9220. +/* The internal gcc register numbers are reversed
  9221. + compared to the real register numbers since
  9222. + gcc expects data types stored over multiple
  9223. + registers in the register file to be big endian
  9224. + if the memory layout is big endian. But this
  9225. + is not the case for avr32 so we fake a big
  9226. + endian register file. */
  9227. +
  9228. +#define FIXED_REGISTERS { \
  9229. + 1, /* Program Counter */ \
  9230. + 0, /* Link Register */ \
  9231. + 1, /* Stack Pointer */ \
  9232. + 0, /* r12 */ \
  9233. + 0, /* r11 */ \
  9234. + 0, /* r10 */ \
  9235. + 0, /* r9 */ \
  9236. + 0, /* r8 */ \
  9237. + 0, /* r7 */ \
  9238. + 0, /* r6 */ \
  9239. + 0, /* r5 */ \
  9240. + 0, /* r4 */ \
  9241. + 0, /* r3 */ \
  9242. + 0, /* r2 */ \
  9243. + 0, /* r1 */ \
  9244. + 0, /* r0 */ \
  9245. +}
  9246. +
  9247. +/*
  9248. +Like FIXED_REGISTERS but has 1 for each register that is
  9249. +clobbered (in general) by function calls as well as for fixed
  9250. +registers. This macro therefore identifies the registers that are not
  9251. +available for general allocation of values that must live across
  9252. +function calls.
  9253. +
  9254. +If a register has 0 in CALL_USED_REGISTERS, the compiler
  9255. +automatically saves it on function entry and restores it on function
  9256. +exit, if the register is used within the function.
  9257. +*/
  9258. +#define CALL_USED_REGISTERS { \
  9259. + 1, /* Program Counter */ \
  9260. + 0, /* Link Register */ \
  9261. + 1, /* Stack Pointer */ \
  9262. + 1, /* r12 */ \
  9263. + 1, /* r11 */ \
  9264. + 1, /* r10 */ \
  9265. + 1, /* r9 */ \
  9266. + 1, /* r8 */ \
  9267. + 0, /* r7 */ \
  9268. + 0, /* r6 */ \
  9269. + 0, /* r5 */ \
  9270. + 0, /* r4 */ \
  9271. + 0, /* r3 */ \
  9272. + 0, /* r2 */ \
  9273. + 0, /* r1 */ \
  9274. + 0, /* r0 */ \
  9275. +}
  9276. +
  9277. +/* Interrupt functions can only use registers that have already been
  9278. + saved by the prologue, even if they would normally be
  9279. + call-clobbered. */
  9280. +#define HARD_REGNO_RENAME_OK(SRC, DST) \
  9281. + (! IS_INTERRUPT (cfun->machine->func_type) || \
  9282. + df_regs_ever_live_p (DST))
  9283. +
  9284. +
  9285. +/*
  9286. +Zero or more C statements that may conditionally modify five variables
  9287. +fixed_regs, call_used_regs, global_regs,
  9288. +reg_names, and reg_class_contents, to take into account
  9289. +any dependence of these register sets on target flags. The first three
  9290. +of these are of type char [] (interpreted as Boolean vectors).
  9291. +global_regs is a const char *[], and
  9292. +reg_class_contents is a HARD_REG_SET. Before the macro is
  9293. +called, fixed_regs, call_used_regs,
  9294. +reg_class_contents, and reg_names have been initialized
  9295. +from FIXED_REGISTERS, CALL_USED_REGISTERS,
  9296. +REG_CLASS_CONTENTS, and REGISTER_NAMES, respectively.
  9297. +global_regs has been cleared, and any -ffixed-[reg],
  9298. +-fcall-used-[reg] and -fcall-saved-[reg]
  9299. +command options have been applied.
  9300. +
  9301. +You need not define this macro if it has no work to do.
  9302. +
  9303. +If the usage of an entire class of registers depends on the target
  9304. +flags, you may indicate this to GCC by using this macro to modify
  9305. +fixed_regs and call_used_regs to 1 for each of the
  9306. +registers in the classes which should not be used by GCC. Also define
  9307. +the macro REG_CLASS_FROM_LETTER to return NO_REGS if it
  9308. +is called with a letter for a class that shouldn't be used.
  9309. +
  9310. + (However, if this class is not included in GENERAL_REGS and all
  9311. +of the insn patterns whose constraints permit this class are
  9312. +controlled by target switches, then GCC will automatically avoid using
  9313. +these registers when the target switches are opposed to them.)
  9314. +*/
  9315. +#define CONDITIONAL_REGISTER_USAGE \
  9316. + do \
  9317. + { \
  9318. + if (flag_pic) \
  9319. + { \
  9320. + fixed_regs[PIC_OFFSET_TABLE_REGNUM] = 1; \
  9321. + call_used_regs[PIC_OFFSET_TABLE_REGNUM] = 1; \
  9322. + } \
  9323. + } \
  9324. + while (0)
  9325. +
  9326. +
  9327. +/*
  9328. +If the program counter has a register number, define this as that
  9329. +register number. Otherwise, do not define it.
  9330. +*/
  9331. +
  9332. +#define LAST_AVR32_REGNUM 16
  9333. +
  9334. +
  9335. +/** Order of Allocation of Registers **/
  9336. +
  9337. +/*
  9338. +If defined, an initializer for a vector of integers, containing the
  9339. +numbers of hard registers in the order in which GCC should prefer
  9340. +to use them (from most preferred to least).
  9341. +
  9342. +If this macro is not defined, registers are used lowest numbered first
  9343. +(all else being equal).
  9344. +
  9345. +One use of this macro is on machines where the highest numbered
  9346. +registers must always be saved and the save-multiple-registers
  9347. +instruction supports only sequences of consecutive registers. On such
  9348. +machines, define REG_ALLOC_ORDER to be an initializer that lists
  9349. +the highest numbered allocable register first.
  9350. +*/
  9351. +#define REG_ALLOC_ORDER \
  9352. +{ \
  9353. + INTERNAL_REGNUM(8), \
  9354. + INTERNAL_REGNUM(9), \
  9355. + INTERNAL_REGNUM(10), \
  9356. + INTERNAL_REGNUM(11), \
  9357. + INTERNAL_REGNUM(12), \
  9358. + LR_REGNUM, \
  9359. + INTERNAL_REGNUM(7), \
  9360. + INTERNAL_REGNUM(6), \
  9361. + INTERNAL_REGNUM(5), \
  9362. + INTERNAL_REGNUM(4), \
  9363. + INTERNAL_REGNUM(3), \
  9364. + INTERNAL_REGNUM(2), \
  9365. + INTERNAL_REGNUM(1), \
  9366. + INTERNAL_REGNUM(0), \
  9367. + SP_REGNUM, \
  9368. + PC_REGNUM \
  9369. +}
  9370. +
  9371. +
  9372. +/** How Values Fit in Registers **/
  9373. +
  9374. +/*
  9375. +A C expression for the number of consecutive hard registers, starting
  9376. +at register number REGNO, required to hold a value of mode
  9377. +MODE.
  9378. +
  9379. +On a machine where all registers are exactly one word, a suitable
  9380. +definition of this macro is
  9381. +
  9382. +#define HARD_REGNO_NREGS(REGNO, MODE) \
  9383. + ((GET_MODE_SIZE (MODE) + UNITS_PER_WORD - 1) \
  9384. + / UNITS_PER_WORD)
  9385. +*/
  9386. +#define HARD_REGNO_NREGS(REGNO, MODE) \
  9387. + ((unsigned int)((GET_MODE_SIZE(MODE) + UNITS_PER_WORD -1 ) / UNITS_PER_WORD))
  9388. +
  9389. +/*
  9390. +A C expression that is nonzero if it is permissible to store a value
  9391. +of mode MODE in hard register number REGNO (or in several
  9392. +registers starting with that one). For a machine where all registers
  9393. +are equivalent, a suitable definition is
  9394. +
  9395. + #define HARD_REGNO_MODE_OK(REGNO, MODE) 1
  9396. +
  9397. +You need not include code to check for the numbers of fixed registers,
  9398. +because the allocation mechanism considers them to be always occupied.
  9399. +
  9400. +On some machines, double-precision values must be kept in even/odd
  9401. +register pairs. You can implement that by defining this macro to reject
  9402. +odd register numbers for such modes.
  9403. +
  9404. +The minimum requirement for a mode to be OK in a register is that the
  9405. +mov[mode] instruction pattern support moves between the
  9406. +register and other hard register in the same class and that moving a
  9407. +value into the register and back out not alter it.
  9408. +
  9409. +Since the same instruction used to move word_mode will work for
  9410. +all narrower integer modes, it is not necessary on any machine for
  9411. +HARD_REGNO_MODE_OK to distinguish between these modes, provided
  9412. +you define patterns movhi, etc., to take advantage of this. This
  9413. +is useful because of the interaction between HARD_REGNO_MODE_OK
  9414. +and MODES_TIEABLE_P; it is very desirable for all integer modes
  9415. +to be tieable.
  9416. +
  9417. +Many machines have special registers for floating point arithmetic.
  9418. +Often people assume that floating point machine modes are allowed only
  9419. +in floating point registers. This is not true. Any registers that
  9420. +can hold integers can safely hold a floating point machine
  9421. +mode, whether or not floating arithmetic can be done on it in those
  9422. +registers. Integer move instructions can be used to move the values.
  9423. +
  9424. +On some machines, though, the converse is true: fixed-point machine
  9425. +modes may not go in floating registers. This is true if the floating
  9426. +registers normalize any value stored in them, because storing a
  9427. +non-floating value there would garble it. In this case,
  9428. +HARD_REGNO_MODE_OK should reject fixed-point machine modes in
  9429. +floating registers. But if the floating registers do not automatically
  9430. +normalize, if you can store any bit pattern in one and retrieve it
  9431. +unchanged without a trap, then any machine mode may go in a floating
  9432. +register, so you can define this macro to say so.
  9433. +
  9434. +The primary significance of special floating registers is rather that
  9435. +they are the registers acceptable in floating point arithmetic
  9436. +instructions. However, this is of no concern to
  9437. +HARD_REGNO_MODE_OK. You handle it by writing the proper
  9438. +constraints for those instructions.
  9439. +
  9440. +On some machines, the floating registers are especially slow to access,
  9441. +so that it is better to store a value in a stack frame than in such a
  9442. +register if floating point arithmetic is not being done. As long as the
  9443. +floating registers are not in class GENERAL_REGS, they will not
  9444. +be used unless some pattern's constraint asks for one.
  9445. +*/
  9446. +#define HARD_REGNO_MODE_OK(REGNO, MODE) avr32_hard_regno_mode_ok(REGNO, MODE)
  9447. +
  9448. +/*
  9449. +A C expression that is nonzero if a value of mode
  9450. +MODE1 is accessible in mode MODE2 without copying.
  9451. +
  9452. +If HARD_REGNO_MODE_OK(R, MODE1) and
  9453. +HARD_REGNO_MODE_OK(R, MODE2) are always the same for
  9454. +any R, then MODES_TIEABLE_P(MODE1, MODE2)
  9455. +should be nonzero. If they differ for any R, you should define
  9456. +this macro to return zero unless some other mechanism ensures the
  9457. +accessibility of the value in a narrower mode.
  9458. +
  9459. +You should define this macro to return nonzero in as many cases as
  9460. +possible since doing so will allow GCC to perform better register
  9461. +allocation.
  9462. +*/
  9463. +#define MODES_TIEABLE_P(MODE1, MODE2) \
  9464. + (GET_MODE_CLASS (MODE1) == GET_MODE_CLASS (MODE2))
  9465. +
  9466. +
  9467. +
  9468. +/******************************************************************************
  9469. + * Register Classes
  9470. + *****************************************************************************/
  9471. +
  9472. +/*
  9473. +An enumeral type that must be defined with all the register class names
  9474. +as enumeral values. NO_REGS must be first. ALL_REGS
  9475. +must be the last register class, followed by one more enumeral value,
  9476. +LIM_REG_CLASSES, which is not a register class but rather
  9477. +tells how many classes there are.
  9478. +
  9479. +Each register class has a number, which is the value of casting
  9480. +the class name to type int. The number serves as an index
  9481. +in many of the tables described below.
  9482. +*/
  9483. +enum reg_class
  9484. +{
  9485. + NO_REGS,
  9486. + GENERAL_REGS,
  9487. + ALL_REGS,
  9488. + LIM_REG_CLASSES
  9489. +};
  9490. +
  9491. +/*
  9492. +The number of distinct register classes, defined as follows:
  9493. + #define N_REG_CLASSES (int) LIM_REG_CLASSES
  9494. +*/
  9495. +#define N_REG_CLASSES (int)LIM_REG_CLASSES
  9496. +
  9497. +/*
  9498. +An initializer containing the names of the register classes as C string
  9499. +constants. These names are used in writing some of the debugging dumps.
  9500. +*/
  9501. +#define REG_CLASS_NAMES \
  9502. +{ \
  9503. + "NO_REGS", \
  9504. + "GENERAL_REGS", \
  9505. + "ALL_REGS" \
  9506. +}
  9507. +
  9508. +/*
  9509. +An initializer containing the contents of the register classes, as integers
  9510. +which are bit masks. The nth integer specifies the contents of class
  9511. +n. The way the integer mask is interpreted is that
  9512. +register r is in the class if mask & (1 << r) is 1.
  9513. +
  9514. +When the machine has more than 32 registers, an integer does not suffice.
  9515. +Then the integers are replaced by sub-initializers, braced groupings containing
  9516. +several integers. Each sub-initializer must be suitable as an initializer
  9517. +for the type HARD_REG_SET which is defined in hard-reg-set.h.
  9518. +In this situation, the first integer in each sub-initializer corresponds to
  9519. +registers 0 through 31, the second integer to registers 32 through 63, and
  9520. +so on.
  9521. +*/
  9522. +#define REG_CLASS_CONTENTS { \
  9523. + {0x00000000}, /* NO_REGS */ \
  9524. + {0x0000FFFF}, /* GENERAL_REGS */ \
  9525. + {0x7FFFFFFF}, /* ALL_REGS */ \
  9526. +}
  9527. +
  9528. +
  9529. +/*
  9530. +A C expression whose value is a register class containing hard register
  9531. +REGNO. In general there is more than one such class; choose a class
  9532. +which is minimal, meaning that no smaller class also contains the
  9533. +register.
  9534. +*/
  9535. +#define REGNO_REG_CLASS(REGNO) (GENERAL_REGS)
  9536. +
  9537. +/*
  9538. +A macro whose definition is the name of the class to which a valid
  9539. +base register must belong. A base register is one used in an address
  9540. +which is the register value plus a displacement.
  9541. +*/
  9542. +#define BASE_REG_CLASS GENERAL_REGS
  9543. +
  9544. +/*
  9545. +This is a variation of the BASE_REG_CLASS macro which allows
  9546. +the selection of a base register in a mode dependent manner. If
  9547. +mode is VOIDmode then it should return the same value as
  9548. +BASE_REG_CLASS.
  9549. +*/
  9550. +#define MODE_BASE_REG_CLASS(MODE) BASE_REG_CLASS
  9551. +
  9552. +/*
  9553. +A macro whose definition is the name of the class to which a valid
  9554. +index register must belong. An index register is one used in an
  9555. +address where its value is either multiplied by a scale factor or
  9556. +added to another register (as well as added to a displacement).
  9557. +*/
  9558. +#define INDEX_REG_CLASS BASE_REG_CLASS
  9559. +
  9560. +/*
  9561. +A C expression which defines the machine-dependent operand constraint
  9562. +letters for register classes. If CHAR is such a letter, the
  9563. +value should be the register class corresponding to it. Otherwise,
  9564. +the value should be NO_REGS. The register letter r,
  9565. +corresponding to class GENERAL_REGS, will not be passed
  9566. +to this macro; you do not need to handle it.
  9567. +*/
  9568. +#define REG_CLASS_FROM_LETTER(CHAR) NO_REGS
  9569. +
  9570. +/* These assume that REGNO is a hard or pseudo reg number.
  9571. + They give nonzero only if REGNO is a hard reg of the suitable class
  9572. + or a pseudo reg currently allocated to a suitable hard reg.
  9573. + Since they use reg_renumber, they are safe only once reg_renumber
  9574. + has been allocated, which happens in local-alloc.c. */
  9575. +#define TEST_REGNO(R, TEST, VALUE) \
  9576. + ((R TEST VALUE) || ((unsigned) reg_renumber[R] TEST VALUE))
  9577. +
  9578. +/*
  9579. +A C expression which is nonzero if register number num is suitable for use as a base
  9580. +register in operand addresses. It may be either a suitable hard register or a pseudo
  9581. +register that has been allocated such a hard register.
  9582. +*/
  9583. +#define REGNO_OK_FOR_BASE_P(NUM) TEST_REGNO(NUM, <=, LAST_REGNUM)
  9584. +
  9585. +/* The following macro defines cover classes for Integrated Register
  9586. + Allocator. Cover classes is a set of non-intersected register
  9587. + classes covering all hard registers used for register allocation
  9588. + purpose. Any move between two registers of a cover class should be
  9589. + cheaper than load or store of the registers. The macro value is
  9590. + array of register classes with LIM_REG_CLASSES used as the end
  9591. + marker. */
  9592. +
  9593. +#define IRA_COVER_CLASSES \
  9594. +{ \
  9595. + GENERAL_REGS, LIM_REG_CLASSES \
  9596. +}
  9597. +
  9598. +/*
  9599. +A C expression which is nonzero if register number NUM is
  9600. +suitable for use as an index register in operand addresses. It may be
  9601. +either a suitable hard register or a pseudo register that has been
  9602. +allocated such a hard register.
  9603. +
  9604. +The difference between an index register and a base register is that
  9605. +the index register may be scaled. If an address involves the sum of
  9606. +two registers, neither one of them scaled, then either one may be
  9607. +labeled the ``base'' and the other the ``index''; but whichever
  9608. +labeling is used must fit the machine's constraints of which registers
  9609. +may serve in each capacity. The compiler will try both labelings,
  9610. +looking for one that is valid, and will reload one or both registers
  9611. +only if neither labeling works.
  9612. +*/
  9613. +#define REGNO_OK_FOR_INDEX_P(NUM) TEST_REGNO(NUM, <=, LAST_REGNUM)
  9614. +
  9615. +/*
  9616. +A C expression that places additional restrictions on the register class
  9617. +to use when it is necessary to copy value X into a register in class
  9618. +CLASS. The value is a register class; perhaps CLASS, or perhaps
  9619. +another, smaller class. On many machines, the following definition is
  9620. +safe: #define PREFERRED_RELOAD_CLASS(X,CLASS) CLASS
  9621. +
  9622. +Sometimes returning a more restrictive class makes better code. For
  9623. +example, on the 68000, when X is an integer constant that is in range
  9624. +for a 'moveq' instruction, the value of this macro is always
  9625. +DATA_REGS as long as CLASS includes the data registers.
  9626. +Requiring a data register guarantees that a 'moveq' will be used.
  9627. +
  9628. +If X is a const_double, by returning NO_REGS
  9629. +you can force X into a memory constant. This is useful on
  9630. +certain machines where immediate floating values cannot be loaded into
  9631. +certain kinds of registers.
  9632. +*/
  9633. +#define PREFERRED_RELOAD_CLASS(X, CLASS) CLASS
  9634. +
  9635. +
  9636. +
  9637. +/*
  9638. +A C expression for the maximum number of consecutive registers
  9639. +of class CLASS needed to hold a value of mode MODE.
  9640. +
  9641. +This is closely related to the macro HARD_REGNO_NREGS. In fact,
  9642. +the value of the macro CLASS_MAX_NREGS(CLASS, MODE)
  9643. +should be the maximum value of HARD_REGNO_NREGS(REGNO, MODE)
  9644. +for all REGNO values in the class CLASS.
  9645. +
  9646. +This macro helps control the handling of multiple-word values
  9647. +in the reload pass.
  9648. +*/
  9649. +#define CLASS_MAX_NREGS(CLASS, MODE) /* ToDo:fixme */ \
  9650. + (unsigned int)((GET_MODE_SIZE(MODE) + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
  9651. +
  9652. +
  9653. +/*
  9654. + Using CONST_OK_FOR_CONSTRAINT_P instead of CONST_OK_FOR_LETTER_P
  9655. + in order to support constraints with more than one letter.
  9656. + Only two letters are then used for constant constraints,
  9657. + the letter 'K' and the letter 'I'. The constraint starting with
  9658. + these letters must consist of four characters. The character following
  9659. + 'K' or 'I' must be either 'u' (unsigned) or 's' (signed) to specify
  9660. + if the constant is zero or sign extended. The last two characters specify
  9661. + the length in bits of the constant. The base constraint letter 'I' means
  9662. + that this is a negated constant, meaning that actually -VAL should be
  9663. + checked to lie within the valid range instead of VAL which is used when
  9664. + 'K' is the base constraint letter.
  9665. +
  9666. +*/
  9667. +
  9668. +#define CONSTRAINT_LEN(C, STR) \
  9669. + ( ((C) == 'K' || (C) == 'I') ? 4 : \
  9670. + ((C) == 'R') ? 5 : \
  9671. + ((C) == 'P') ? -1 : \
  9672. + DEFAULT_CONSTRAINT_LEN((C), (STR)) )
  9673. +
  9674. +#define CONST_OK_FOR_CONSTRAINT_P(VALUE, C, STR) \
  9675. + avr32_const_ok_for_constraint_p(VALUE, C, STR)
  9676. +
  9677. +/*
  9678. +A C expression that defines the machine-dependent operand constraint
  9679. +letters that specify particular ranges of const_double values ('G' or 'H').
  9680. +
  9681. +If C is one of those letters, the expression should check that
  9682. +VALUE, an RTX of code const_double, is in the appropriate
  9683. +range and return 1 if so, 0 otherwise. If C is not one of those
  9684. +letters, the value should be 0 regardless of VALUE.
  9685. +
  9686. +const_double is used for all floating-point constants and for
  9687. +DImode fixed-point constants. A given letter can accept either
  9688. +or both kinds of values. It can use GET_MODE to distinguish
  9689. +between these kinds.
  9690. +*/
  9691. +#define CONST_DOUBLE_OK_FOR_LETTER_P(OP, C) \
  9692. + ((C) == 'G' ? avr32_const_double_immediate(OP) : 0)
  9693. +
  9694. +/*
  9695. +A C expression that defines the optional machine-dependent constraint
  9696. +letters that can be used to segregate specific types of operands, usually
  9697. +memory references, for the target machine. Any letter that is not
  9698. +elsewhere defined and not matched by REG_CLASS_FROM_LETTER
  9699. +may be used. Normally this macro will not be defined.
  9700. +
  9701. +If it is required for a particular target machine, it should return 1
  9702. +if VALUE corresponds to the operand type represented by the
  9703. +constraint letter C. If C is not defined as an extra
  9704. +constraint, the value returned should be 0 regardless of VALUE.
  9705. +
  9706. +For example, on the ROMP, load instructions cannot have their output
  9707. +in r0 if the memory reference contains a symbolic address. Constraint
  9708. +letter 'Q' is defined as representing a memory address that does
  9709. +not contain a symbolic address. An alternative is specified with
  9710. +a 'Q' constraint on the input and 'r' on the output. The next
  9711. +alternative specifies 'm' on the input and a register class that
  9712. +does not include r0 on the output.
  9713. +*/
  9714. +#define EXTRA_CONSTRAINT_STR(OP, C, STR) \
  9715. + ((C) == 'W' ? avr32_address_operand(OP, GET_MODE(OP)) : \
  9716. + (C) == 'R' ? (avr32_indirect_register_operand(OP, GET_MODE(OP)) || \
  9717. + (avr32_imm_disp_memory_operand(OP, GET_MODE(OP)) \
  9718. + && avr32_const_ok_for_constraint_p( \
  9719. + INTVAL(XEXP(XEXP(OP, 0), 1)), \
  9720. + (STR)[1], &(STR)[1]))) : \
  9721. + (C) == 'S' ? avr32_indexed_memory_operand(OP, GET_MODE(OP)) : \
  9722. + (C) == 'T' ? avr32_const_pool_ref_operand(OP, GET_MODE(OP)) : \
  9723. + (C) == 'U' ? SYMBOL_REF_RCALL_FUNCTION_P(OP) : \
  9724. + (C) == 'Z' ? avr32_cop_memory_operand(OP, GET_MODE(OP)) : \
  9725. + (C) == 'Q' ? avr32_non_rmw_memory_operand(OP, GET_MODE(OP)) : \
  9726. + (C) == 'Y' ? avr32_rmw_memory_operand(OP, GET_MODE(OP)) : \
  9727. + 0)
  9728. +
  9729. +
  9730. +#define EXTRA_MEMORY_CONSTRAINT(C, STR) ( ((C) == 'R') || \
  9731. + ((C) == 'Q') || \
  9732. + ((C) == 'S') || \
  9733. + ((C) == 'Y') || \
  9734. + ((C) == 'Z') )
  9735. +
  9736. +
  9737. +/* Returns nonzero if op is a function SYMBOL_REF which
  9738. + can be called using an rcall instruction */
  9739. +#define SYMBOL_REF_RCALL_FUNCTION_P(op) \
  9740. + ( GET_CODE(op) == SYMBOL_REF \
  9741. + && SYMBOL_REF_FUNCTION_P(op) \
  9742. + && SYMBOL_REF_LOCAL_P(op) \
  9743. + && !SYMBOL_REF_EXTERNAL_P(op) \
  9744. + && !TARGET_HAS_ASM_ADDR_PSEUDOS )
  9745. +
  9746. +/******************************************************************************
  9747. + * Stack Layout and Calling Conventions
  9748. + *****************************************************************************/
  9749. +
  9750. +/** Basic Stack Layout **/
  9751. +
  9752. +/*
  9753. +Define this macro if pushing a word onto the stack moves the stack
  9754. +pointer to a smaller address.
  9755. +
  9756. +When we say, ``define this macro if ...,'' it means that the
  9757. +compiler checks this macro only with #ifdef so the precise
  9758. +definition used does not matter.
  9759. +*/
  9760. +/* pushm decrements SP: *(--SP) <-- Rx */
  9761. +#define STACK_GROWS_DOWNWARD
  9762. +
  9763. +/*
  9764. +This macro defines the operation used when something is pushed
  9765. +on the stack. In RTL, a push operation will be
  9766. +(set (mem (STACK_PUSH_CODE (reg sp))) ...)
  9767. +
  9768. +The choices are PRE_DEC, POST_DEC, PRE_INC,
  9769. +and POST_INC. Which of these is correct depends on
  9770. +the stack direction and on whether the stack pointer points
  9771. +to the last item on the stack or whether it points to the
  9772. +space for the next item on the stack.
  9773. +
  9774. +The default is PRE_DEC when STACK_GROWS_DOWNWARD is
  9775. +defined, which is almost always right, and PRE_INC otherwise,
  9776. +which is often wrong.
  9777. +*/
  9778. +/* pushm: *(--SP) <-- Rx */
  9779. +#define STACK_PUSH_CODE PRE_DEC
  9780. +
  9781. +/* Define this to nonzero if the nominal address of the stack frame
  9782. + is at the high-address end of the local variables;
  9783. + that is, each additional local variable allocated
  9784. + goes at a more negative offset in the frame. */
  9785. +#define FRAME_GROWS_DOWNWARD 1
  9786. +
  9787. +
  9788. +/*
  9789. +Offset from the frame pointer to the first local variable slot to be allocated.
  9790. +
  9791. +If FRAME_GROWS_DOWNWARD, find the next slot's offset by
  9792. +subtracting the first slot's length from STARTING_FRAME_OFFSET.
  9793. +Otherwise, it is found by adding the length of the first slot to the
  9794. +value STARTING_FRAME_OFFSET.
  9795. + (i'm not sure if the above is still correct.. had to change it to get
  9796. + rid of an overfull. --mew 2feb93 )
  9797. +*/
  9798. +#define STARTING_FRAME_OFFSET 0
  9799. +
  9800. +/*
  9801. +Offset from the stack pointer register to the first location at which
  9802. +outgoing arguments are placed. If not specified, the default value of
  9803. +zero is used. This is the proper value for most machines.
  9804. +
  9805. +If ARGS_GROW_DOWNWARD, this is the offset to the location above
  9806. +the first location at which outgoing arguments are placed.
  9807. +*/
  9808. +#define STACK_POINTER_OFFSET 0
  9809. +
  9810. +/*
  9811. +Offset from the argument pointer register to the first argument's
  9812. +address. On some machines it may depend on the data type of the
  9813. +function.
  9814. +
  9815. +If ARGS_GROW_DOWNWARD, this is the offset to the location above
  9816. +the first argument's address.
  9817. +*/
  9818. +#define FIRST_PARM_OFFSET(FUNDECL) 0
  9819. +
  9820. +
  9821. +/*
  9822. +A C expression whose value is RTL representing the address in a stack
  9823. +frame where the pointer to the caller's frame is stored. Assume that
  9824. +FRAMEADDR is an RTL expression for the address of the stack frame
  9825. +itself.
  9826. +
  9827. +If you don't define this macro, the default is to return the value
  9828. +of FRAMEADDR - that is, the stack frame address is also the
  9829. +address of the stack word that points to the previous frame.
  9830. +*/
  9831. +#define DYNAMIC_CHAIN_ADDRESS(FRAMEADDR) plus_constant ((FRAMEADDR), 4)
  9832. +
  9833. +
  9834. +/*
  9835. +A C expression whose value is RTL representing the value of the return
  9836. +address for the frame COUNT steps up from the current frame, after
  9837. +the prologue. FRAMEADDR is the frame pointer of the COUNT
  9838. +frame, or the frame pointer of the COUNT - 1 frame if
  9839. +RETURN_ADDR_IN_PREVIOUS_FRAME is defined.
  9840. +
  9841. +The value of the expression must always be the correct address when
  9842. +COUNT is zero, but may be NULL_RTX if there is no way to
  9843. +determine the return address of other frames.
  9844. +*/
  9845. +#define RETURN_ADDR_RTX(COUNT, FRAMEADDR) avr32_return_addr(COUNT, FRAMEADDR)
  9846. +
  9847. +
  9848. +/*
  9849. +A C expression whose value is RTL representing the location of the
  9850. +incoming return address at the beginning of any function, before the
  9851. +prologue. This RTL is either a REG, indicating that the return
  9852. +value is saved in 'REG', or a MEM representing a location in
  9853. +the stack.
  9854. +
  9855. +You only need to define this macro if you want to support call frame
  9856. +debugging information like that provided by DWARF 2.
  9857. +
  9858. +If this RTL is a REG, you should also define
  9859. +DWARF_FRAME_RETURN_COLUMN to DWARF_FRAME_REGNUM (REGNO).
  9860. +*/
  9861. +#define INCOMING_RETURN_ADDR_RTX gen_rtx_REG (Pmode, LR_REGNUM)
  9862. +
  9863. +/*
  9864. +A C expression whose value is an integer giving the offset, in bytes,
  9865. +from the value of the stack pointer register to the top of the stack
  9866. +frame at the beginning of any function, before the prologue. The top of
  9867. +the frame is defined to be the value of the stack pointer in the
  9868. +previous frame, just before the call instruction.
  9869. +
  9870. +You only need to define this macro if you want to support call frame
  9871. +debugging information like that provided by DWARF 2.
  9872. +*/
  9873. +#define INCOMING_FRAME_SP_OFFSET 0
  9874. +
  9875. +
  9876. +/** Exception Handling Support **/
  9877. +
  9878. +/* Use setjmp/longjmp for exception handling. */
  9879. +#define DWARF2_UNWIND_INFO 0
  9880. +#define MUST_USE_SJLJ_EXCEPTIONS 1
  9881. +
  9882. +/*
  9883. +A C expression whose value is the Nth register number used for
  9884. +data by exception handlers, or INVALID_REGNUM if fewer than
  9885. +N registers are usable.
  9886. +
  9887. +The exception handling library routines communicate with the exception
  9888. +handlers via a set of agreed upon registers. Ideally these registers
  9889. +should be call-clobbered; it is possible to use call-saved registers,
  9890. +but may negatively impact code size. The target must support at least
  9891. +2 data registers, but should define 4 if there are enough free registers.
  9892. +
  9893. +You must define this macro if you want to support call frame exception
  9894. +handling like that provided by DWARF 2.
  9895. +*/
  9896. +/*
  9897. + Use r9-r11
  9898. +*/
  9899. +#define EH_RETURN_DATA_REGNO(N) \
  9900. + ((N<3) ? INTERNAL_REGNUM(N+9) : INVALID_REGNUM)
  9901. +
  9902. +/*
  9903. +A C expression whose value is RTL representing a location in which
  9904. +to store a stack adjustment to be applied before function return.
  9905. +This is used to unwind the stack to an exception handler's call frame.
  9906. +It will be assigned zero on code paths that return normally.
  9907. +
  9908. +Typically this is a call-clobbered hard register that is otherwise
  9909. +untouched by the epilogue, but could also be a stack slot.
  9910. +
  9911. +You must define this macro if you want to support call frame exception
  9912. +handling like that provided by DWARF 2.
  9913. +*/
  9914. +/*
  9915. + Use r8
  9916. +*/
  9917. +#define EH_RETURN_STACKADJ_REGNO INTERNAL_REGNUM(8)
  9918. +#define EH_RETURN_STACKADJ_RTX gen_rtx_REG(SImode, EH_RETURN_STACKADJ_REGNO)
  9919. +
  9920. +/*
  9921. +A C expression whose value is RTL representing a location in which
  9922. +to store the address of an exception handler to which we should
  9923. +return. It will not be assigned on code paths that return normally.
  9924. +
  9925. +Typically this is the location in the call frame at which the normal
  9926. +return address is stored. For targets that return by popping an
  9927. +address off the stack, this might be a memory address just below
  9928. +the target call frame rather than inside the current call
  9929. +frame. EH_RETURN_STACKADJ_RTX will have already been assigned,
  9930. +so it may be used to calculate the location of the target call frame.
  9931. +
  9932. +Some targets have more complex requirements than storing to an
  9933. +address calculable during initial code generation. In that case
  9934. +the eh_return instruction pattern should be used instead.
  9935. +
  9936. +If you want to support call frame exception handling, you must
  9937. +define either this macro or the eh_return instruction pattern.
  9938. +*/
  9939. +/*
  9940. + We define the eh_return instruction pattern, so this isn't needed.
  9941. +*/
  9942. +/* #define EH_RETURN_HANDLER_RTX gen_rtx_REG(Pmode, RET_REGISTER) */
  9943. +
  9944. +/*
  9945. + This macro chooses the encoding of pointers embedded in the
  9946. + exception handling sections. If at all possible, this should be
  9947. + defined such that the exception handling section will not require
  9948. + dynamic relocations, and so may be read-only.
  9949. +
  9950. + code is 0 for data, 1 for code labels, 2 for function
  9951. + pointers. global is true if the symbol may be affected by dynamic
  9952. + relocations. The macro should return a combination of the DW_EH_PE_*
  9953. + defines as found in dwarf2.h.
  9954. +
  9955. + If this macro is not defined, pointers will not be encoded but
  9956. + represented directly.
  9957. +*/
  9958. +#define ASM_PREFERRED_EH_DATA_FORMAT(CODE, GLOBAL) \
  9959. + ((flag_pic && (GLOBAL) ? DW_EH_PE_indirect : 0) \
  9960. + | (flag_pic ? DW_EH_PE_pcrel : DW_EH_PE_absptr) \
  9961. + | DW_EH_PE_sdata4)
  9962. +
  9963. +/* ToDo: The rest of this subsection */
  9964. +
  9965. +/** Specifying How Stack Checking is Done **/
  9966. +/* ToDo: All in this subsection */
  9967. +
  9968. +/** Registers That Address the Stack Frame **/
  9969. +
  9970. +/*
  9971. +The register number of the stack pointer register, which must also be a
  9972. +fixed register according to FIXED_REGISTERS. On most machines,
  9973. +the hardware determines which register this is.
  9974. +*/
  9975. +/* Using r13 as stack pointer. */
  9976. +#define STACK_POINTER_REGNUM INTERNAL_REGNUM(13)
  9977. +
  9978. +/*
  9979. +The register number of the frame pointer register, which is used to
  9980. +access automatic variables in the stack frame. On some machines, the
  9981. +hardware determines which register this is. On other machines, you can
  9982. +choose any register you wish for this purpose.
  9983. +*/
  9984. +/* Use r7 */
  9985. +#define FRAME_POINTER_REGNUM INTERNAL_REGNUM(7)
  9986. +
  9987. +/*
  9988. +The register number of the arg pointer register, which is used to access
  9989. +the function's argument list. On some machines, this is the same as the
  9990. +frame pointer register. On some machines, the hardware determines which
  9991. +register this is. On other machines, you can choose any register you
  9992. +wish for this purpose. If this is not the same register as the frame
  9993. +pointer register, then you must mark it as a fixed register according to
  9994. +FIXED_REGISTERS, or arrange to be able to eliminate it (see Section
  9995. +10.10.5 [Elimination], page 224).
  9996. +*/
  9997. +/* Using r4 */
  9998. +#define ARG_POINTER_REGNUM INTERNAL_REGNUM(4)
  9999. +
  10000. +
  10001. +/*
  10002. +Register numbers used for passing a function's static chain pointer. If
  10003. +register windows are used, the register number as seen by the called
  10004. +function is STATIC_CHAIN_INCOMING_REGNUM, while the register
  10005. +number as seen by the calling function is STATIC_CHAIN_REGNUM. If
  10006. +these registers are the same, STATIC_CHAIN_INCOMING_REGNUM need
  10007. +not be defined.
  10008. +
  10009. +The static chain register need not be a fixed register.
  10010. +
  10011. +If the static chain is passed in memory, these macros should not be
  10012. +defined; instead, the next two macros should be defined.
  10013. +*/
  10014. +/* Using r0 */
  10015. +#define STATIC_CHAIN_REGNUM INTERNAL_REGNUM(0)
  10016. +
  10017. +/** Eliminating Frame Pointer and Arg Pointer **/
  10018. +
  10019. +/*
  10020. +A C expression which is nonzero if a function must have and use a frame
  10021. +pointer. This expression is evaluated in the reload pass. If its value is
  10022. +nonzero the function will have a frame pointer.
  10023. +
  10024. +The expression can in principle examine the current function and decide
  10025. +according to the facts, but on most machines the constant 0 or the
  10026. +constant 1 suffices. Use 0 when the machine allows code to be generated
  10027. +with no frame pointer, and doing so saves some time or space. Use 1
  10028. +when there is no possible advantage to avoiding a frame pointer.
  10029. +
  10030. +In certain cases, the compiler does not know how to produce valid code
  10031. +without a frame pointer. The compiler recognizes those cases and
  10032. +automatically gives the function a frame pointer regardless of what
  10033. +FRAME_POINTER_REQUIRED says. You don't need to worry about
  10034. +them.
  10035. +
  10036. +In a function that does not require a frame pointer, the frame pointer
  10037. +register can be allocated for ordinary usage, unless you mark it as a
  10038. +fixed register. See FIXED_REGISTERS for more information.
  10039. +*/
  10040. +/* We need the frame pointer when compiling for profiling */
  10041. +#define FRAME_POINTER_REQUIRED (crtl->profile)
  10042. +
  10043. +/*
  10044. +A C statement to store in the variable DEPTH_VAR the difference
  10045. +between the frame pointer and the stack pointer values immediately after
  10046. +the function prologue. The value would be computed from information
  10047. +such as the result of get_frame_size () and the tables of
  10048. +registers regs_ever_live and call_used_regs.
  10049. +
  10050. +If ELIMINABLE_REGS is defined, this macro will be not be used and
  10051. +need not be defined. Otherwise, it must be defined even if
  10052. +FRAME_POINTER_REQUIRED is defined to always be true; in that
  10053. +case, you may set DEPTH_VAR to anything.
  10054. +*/
  10055. +#define INITIAL_FRAME_POINTER_OFFSET(DEPTH_VAR) ((DEPTH_VAR) = get_frame_size())
  10056. +
  10057. +/*
  10058. +If defined, this macro specifies a table of register pairs used to
  10059. +eliminate unneeded registers that point into the stack frame. If it is not
  10060. +defined, the only elimination attempted by the compiler is to replace
  10061. +references to the frame pointer with references to the stack pointer.
  10062. +
  10063. +The definition of this macro is a list of structure initializations, each
  10064. +of which specifies an original and replacement register.
  10065. +
  10066. +On some machines, the position of the argument pointer is not known until
  10067. +the compilation is completed. In such a case, a separate hard register
  10068. +must be used for the argument pointer. This register can be eliminated by
  10069. +replacing it with either the frame pointer or the argument pointer,
  10070. +depending on whether or not the frame pointer has been eliminated.
  10071. +
  10072. +In this case, you might specify:
  10073. + #define ELIMINABLE_REGS \
  10074. + {{ARG_POINTER_REGNUM, STACK_POINTER_REGNUM}, \
  10075. + {ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM}, \
  10076. + {FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM}}
  10077. +
  10078. +Note that the elimination of the argument pointer with the stack pointer is
  10079. +specified first since that is the preferred elimination.
  10080. +*/
  10081. +#define ELIMINABLE_REGS \
  10082. +{ \
  10083. + { FRAME_POINTER_REGNUM, STACK_POINTER_REGNUM }, \
  10084. + { ARG_POINTER_REGNUM, STACK_POINTER_REGNUM }, \
  10085. + { ARG_POINTER_REGNUM, FRAME_POINTER_REGNUM } \
  10086. +}
  10087. +
  10088. +/*
  10089. +A C expression that returns nonzero if the compiler is allowed to try
  10090. +to replace register number FROM with register number
  10091. +TO. This macro need only be defined if ELIMINABLE_REGS
  10092. +is defined, and will usually be the constant 1, since most of the cases
  10093. +preventing register elimination are things that the compiler already
  10094. +knows about.
  10095. +*/
  10096. +#define CAN_ELIMINATE(FROM, TO) 1
  10097. +
  10098. +/*
  10099. +This macro is similar to INITIAL_FRAME_POINTER_OFFSET. It
  10100. +specifies the initial difference between the specified pair of
  10101. +registers. This macro must be defined if ELIMINABLE_REGS is
  10102. +defined.
  10103. +*/
  10104. +#define INITIAL_ELIMINATION_OFFSET(FROM, TO, OFFSET) \
  10105. + ((OFFSET) = avr32_initial_elimination_offset(FROM, TO))
  10106. +
  10107. +/** Passing Function Arguments on the Stack **/
  10108. +
  10109. +
  10110. +/*
  10111. +A C expression. If nonzero, push insns will be used to pass
  10112. +outgoing arguments.
  10113. +If the target machine does not have a push instruction, set it to zero.
  10114. +That directs GCC to use an alternate strategy: to
  10115. +allocate the entire argument block and then store the arguments into
  10116. +it. When PUSH_ARGS is nonzero, PUSH_ROUNDING must be defined too.
  10117. +*/
  10118. +#define PUSH_ARGS 1
  10119. +
  10120. +/*
  10121. +A C expression that is the number of bytes actually pushed onto the
  10122. +stack when an instruction attempts to push NPUSHED bytes.
  10123. +
  10124. +On some machines, the definition
  10125. +
  10126. + #define PUSH_ROUNDING(BYTES) (BYTES)
  10127. +
  10128. +will suffice. But on other machines, instructions that appear
  10129. +to push one byte actually push two bytes in an attempt to maintain
  10130. +alignment. Then the definition should be
  10131. +
  10132. + #define PUSH_ROUNDING(BYTES) (((BYTES) + 1) & ~1)
  10133. +*/
  10134. +/* Push 4 bytes at the time. */
  10135. +#define PUSH_ROUNDING(NPUSHED) (((NPUSHED) + 3) & ~3)
  10136. +
  10137. +/*
  10138. +A C expression. If nonzero, the maximum amount of space required for
  10139. +outgoing arguments will be computed and placed into the variable
  10140. +current_function_outgoing_args_size. No space will be pushed
  10141. +onto the stack for each call; instead, the function prologue should
  10142. +increase the stack frame size by this amount.
  10143. +
  10144. +Setting both PUSH_ARGS and ACCUMULATE_OUTGOING_ARGS is not proper.
  10145. +*/
  10146. +#define ACCUMULATE_OUTGOING_ARGS 0
  10147. +
  10148. +/*
  10149. +A C expression that should indicate the number of bytes of its own
  10150. +arguments that a function pops on returning, or 0 if the
  10151. +function pops no arguments and the caller must therefore pop them all
  10152. +after the function returns.
  10153. +
  10154. +FUNDECL is a C variable whose value is a tree node that describes
  10155. +the function in question. Normally it is a node of type
  10156. +FUNCTION_DECL that describes the declaration of the function.
  10157. +From this you can obtain the DECL_ATTRIBUTES of the function.
  10158. +
  10159. +FUNTYPE is a C variable whose value is a tree node that
  10160. +describes the function in question. Normally it is a node of type
  10161. +FUNCTION_TYPE that describes the data type of the function.
  10162. +From this it is possible to obtain the data types of the value and
  10163. +arguments (if known).
  10164. +
  10165. +When a call to a library function is being considered, FUNDECL
  10166. +will contain an identifier node for the library function. Thus, if
  10167. +you need to distinguish among various library functions, you can do so
  10168. +by their names. Note that ``library function'' in this context means
  10169. +a function used to perform arithmetic, whose name is known specially
  10170. +in the compiler and was not mentioned in the C code being compiled.
  10171. +
  10172. +STACK_SIZE is the number of bytes of arguments passed on the
  10173. +stack. If a variable number of bytes is passed, it is zero, and
  10174. +argument popping will always be the responsibility of the calling function.
  10175. +
  10176. +On the VAX, all functions always pop their arguments, so the definition
  10177. +of this macro is STACK_SIZE. On the 68000, using the standard
  10178. +calling convention, no functions pop their arguments, so the value of
  10179. +the macro is always 0 in this case. But an alternative calling
  10180. +convention is available in which functions that take a fixed number of
  10181. +arguments pop them but other functions (such as printf) pop
  10182. +nothing (the caller pops all). When this convention is in use,
  10183. +FUNTYPE is examined to determine whether a function takes a fixed
  10184. +number of arguments.
  10185. +*/
  10186. +#define RETURN_POPS_ARGS(FUNDECL, FUNTYPE, STACK_SIZE) 0
  10187. +
  10188. +
  10189. +/* Return true if we can use a single return instruction for this function */
  10190. +#define USE_RETURN_INSN(ISCOND) avr32_use_return_insn(ISCOND)
  10191. +
  10192. +/*
  10193. +A C expression that should indicate the number of bytes a call sequence
  10194. +pops off the stack. It is added to the value of RETURN_POPS_ARGS
  10195. +when compiling a function call.
  10196. +
  10197. +CUM is the variable in which all arguments to the called function
  10198. +have been accumulated.
  10199. +
  10200. +On certain architectures, such as the SH5, a call trampoline is used
  10201. +that pops certain registers off the stack, depending on the arguments
  10202. +that have been passed to the function. Since this is a property of the
  10203. +call site, not of the called function, RETURN_POPS_ARGS is not
  10204. +appropriate.
  10205. +*/
  10206. +#define CALL_POPS_ARGS(CUM) 0
  10207. +
  10208. +/* Passing Arguments in Registers */
  10209. +
  10210. +/*
  10211. +A C expression that controls whether a function argument is passed
  10212. +in a register, and which register.
  10213. +
  10214. +The arguments are CUM, which summarizes all the previous
  10215. +arguments; MODE, the machine mode of the argument; TYPE,
  10216. +the data type of the argument as a tree node or 0 if that is not known
  10217. +(which happens for C support library functions); and NAMED,
  10218. +which is 1 for an ordinary argument and 0 for nameless arguments that
  10219. +correspond to '...' in the called function's prototype.
  10220. +TYPE can be an incomplete type if a syntax error has previously
  10221. +occurred.
  10222. +
  10223. +The value of the expression is usually either a reg RTX for the
  10224. +hard register in which to pass the argument, or zero to pass the
  10225. +argument on the stack.
  10226. +
  10227. +For machines like the VAX and 68000, where normally all arguments are
  10228. +pushed, zero suffices as a definition.
  10229. +
  10230. +The value of the expression can also be a parallel RTX. This is
  10231. +used when an argument is passed in multiple locations. The mode of the
  10232. +of the parallel should be the mode of the entire argument. The
  10233. +parallel holds any number of expr_list pairs; each one
  10234. +describes where part of the argument is passed. In each
  10235. +expr_list the first operand must be a reg RTX for the hard
  10236. +register in which to pass this part of the argument, and the mode of the
  10237. +register RTX indicates how large this part of the argument is. The
  10238. +second operand of the expr_list is a const_int which gives
  10239. +the offset in bytes into the entire argument of where this part starts.
  10240. +As a special exception the first expr_list in the parallel
  10241. +RTX may have a first operand of zero. This indicates that the entire
  10242. +argument is also stored on the stack.
  10243. +
  10244. +The last time this macro is called, it is called with MODE == VOIDmode,
  10245. +and its result is passed to the call or call_value
  10246. +pattern as operands 2 and 3 respectively.
  10247. +
  10248. +The usual way to make the ISO library 'stdarg.h' work on a machine
  10249. +where some arguments are usually passed in registers, is to cause
  10250. +nameless arguments to be passed on the stack instead. This is done
  10251. +by making FUNCTION_ARG return 0 whenever NAMED is 0.
  10252. +
  10253. +You may use the macro MUST_PASS_IN_STACK (MODE, TYPE)
  10254. +in the definition of this macro to determine if this argument is of a
  10255. +type that must be passed in the stack. If REG_PARM_STACK_SPACE
  10256. +is not defined and FUNCTION_ARG returns nonzero for such an
  10257. +argument, the compiler will abort. If REG_PARM_STACK_SPACE is
  10258. +defined, the argument will be computed in the stack and then loaded into
  10259. +a register. */
  10260. +
  10261. +#define FUNCTION_ARG(CUM, MODE, TYPE, NAMED) \
  10262. + avr32_function_arg(&(CUM), MODE, TYPE, NAMED)
  10263. +
  10264. +/*
  10265. +A C type for declaring a variable that is used as the first argument of
  10266. +FUNCTION_ARG and other related values. For some target machines,
  10267. +the type int suffices and can hold the number of bytes of
  10268. +argument so far.
  10269. +
  10270. +There is no need to record in CUMULATIVE_ARGS anything about the
  10271. +arguments that have been passed on the stack. The compiler has other
  10272. +variables to keep track of that. For target machines on which all
  10273. +arguments are passed on the stack, there is no need to store anything in
  10274. +CUMULATIVE_ARGS; however, the data structure must exist and
  10275. +should not be empty, so use int.
  10276. +*/
  10277. +typedef struct avr32_args
  10278. +{
  10279. + /* Index representing the argument register the current function argument
  10280. + will occupy */
  10281. + int index;
  10282. + /* A mask with bits representing the argument registers: if a bit is set
  10283. + then this register is used for an argument */
  10284. + int used_index;
  10285. + /* TRUE if this function has anonymous arguments */
  10286. + int uses_anonymous_args;
  10287. + /* The size in bytes of the named arguments pushed on the stack */
  10288. + int stack_pushed_args_size;
  10289. + /* Set to true if this function needs a Return Value Pointer */
  10290. + int use_rvp;
  10291. + /* Set to true if function is a flashvault function. */
  10292. + int flashvault_func;
  10293. +
  10294. +} CUMULATIVE_ARGS;
  10295. +
  10296. +
  10297. +#define FIRST_CUM_REG_INDEX 0
  10298. +#define LAST_CUM_REG_INDEX 4
  10299. +#define GET_REG_INDEX(CUM) ((CUM)->index)
  10300. +#define SET_REG_INDEX(CUM, INDEX) ((CUM)->index = (INDEX))
  10301. +#define GET_USED_INDEX(CUM, INDEX) ((CUM)->used_index & (1 << (INDEX)))
  10302. +#define SET_USED_INDEX(CUM, INDEX) \
  10303. + do \
  10304. + { \
  10305. + if ((INDEX) >= 0) \
  10306. + (CUM)->used_index |= (1 << (INDEX)); \
  10307. + } \
  10308. + while (0)
  10309. +#define SET_INDEXES_UNUSED(CUM) ((CUM)->used_index = 0)
  10310. +
  10311. +/*
  10312. + A C statement (sans semicolon) for initializing the variable cum for the
  10313. + state at the beginning of the argument list. The variable has type
  10314. + CUMULATIVE_ARGS. The value of FNTYPE is the tree node for the data type of
  10315. + the function which will receive the args, or 0 if the args are to a compiler
  10316. + support library function. For direct calls that are not libcalls, FNDECL
  10317. + contain the declaration node of the function. FNDECL is also set when
  10318. + INIT_CUMULATIVE_ARGS is used to find arguments for the function being
  10319. + compiled. N_NAMED_ARGS is set to the number of named arguments, including a
  10320. + structure return address if it is passed as a parameter, when making a call.
  10321. + When processing incoming arguments, N_NAMED_ARGS is set to -1.
  10322. +
  10323. + When processing a call to a compiler support library function, LIBNAME
  10324. + identifies which one. It is a symbol_ref rtx which contains the name of the
  10325. + function, as a string. LIBNAME is 0 when an ordinary C function call is
  10326. + being processed. Thus, each time this macro is called, either LIBNAME or
  10327. + FNTYPE is nonzero, but never both of them at once.
  10328. +*/
  10329. +#define INIT_CUMULATIVE_ARGS(CUM, FNTYPE, LIBNAME, FNDECL, N_NAMED_ARGS) \
  10330. + avr32_init_cumulative_args(&(CUM), FNTYPE, LIBNAME, FNDECL)
  10331. +
  10332. +/*
  10333. +A C statement (sans semicolon) to update the summarizer variable
  10334. +CUM to advance past an argument in the argument list. The
  10335. +values MODE, TYPE and NAMED describe that argument.
  10336. +Once this is done, the variable CUM is suitable for analyzing
  10337. +the following argument with FUNCTION_ARG, etc.
  10338. +
  10339. +This macro need not do anything if the argument in question was passed
  10340. +on the stack. The compiler knows how to track the amount of stack space
  10341. +used for arguments without any special help.
  10342. +*/
  10343. +#define FUNCTION_ARG_ADVANCE(CUM, MODE, TYPE, NAMED) \
  10344. + avr32_function_arg_advance(&(CUM), MODE, TYPE, NAMED)
  10345. +
  10346. +/*
  10347. +If defined, a C expression which determines whether, and in which direction,
  10348. +to pad out an argument with extra space. The value should be of type
  10349. +enum direction: either 'upward' to pad above the argument,
  10350. +'downward' to pad below, or 'none' to inhibit padding.
  10351. +
  10352. +The amount of padding is always just enough to reach the next
  10353. +multiple of FUNCTION_ARG_BOUNDARY; this macro does not control
  10354. +it.
  10355. +
  10356. +This macro has a default definition which is right for most systems.
  10357. +For little-endian machines, the default is to pad upward. For
  10358. +big-endian machines, the default is to pad downward for an argument of
  10359. +constant size shorter than an int, and upward otherwise.
  10360. +*/
  10361. +#define FUNCTION_ARG_PADDING(MODE, TYPE) \
  10362. + avr32_function_arg_padding(MODE, TYPE)
  10363. +
  10364. +/*
  10365. + Specify padding for the last element of a block move between registers
  10366. + and memory. First is nonzero if this is the only element. Defining
  10367. + this macro allows better control of register function parameters on
  10368. + big-endian machines, without using PARALLEL rtl. In particular,
  10369. + MUST_PASS_IN_STACK need not test padding and mode of types in registers,
  10370. + as there is no longer a "wrong" part of a register; For example, a three
  10371. + byte aggregate may be passed in the high part of a register if so required.
  10372. +*/
  10373. +#define BLOCK_REG_PADDING(MODE, TYPE, FIRST) \
  10374. + avr32_function_arg_padding(MODE, TYPE)
  10375. +
  10376. +/*
  10377. +If defined, a C expression which determines whether the default
  10378. +implementation of va_arg will attempt to pad down before reading the
  10379. +next argument, if that argument is smaller than its aligned space as
  10380. +controlled by PARM_BOUNDARY. If this macro is not defined, all such
  10381. +arguments are padded down if BYTES_BIG_ENDIAN is true.
  10382. +*/
  10383. +#define PAD_VARARGS_DOWN \
  10384. + (FUNCTION_ARG_PADDING (TYPE_MODE (type), type) == downward)
  10385. +
  10386. +/*
  10387. +A C expression that is nonzero if REGNO is the number of a hard
  10388. +register in which function arguments are sometimes passed. This does
  10389. +not include implicit arguments such as the static chain and
  10390. +the structure-value address. On many machines, no registers can be
  10391. +used for this purpose since all function arguments are pushed on the
  10392. +stack.
  10393. +*/
  10394. +/*
  10395. + Use r8 - r12 for function arguments.
  10396. +*/
  10397. +#define FUNCTION_ARG_REGNO_P(REGNO) \
  10398. + ((REGNO) >= 3 && (REGNO) <= 7)
  10399. +
  10400. +/* Number of registers used for passing function arguments */
  10401. +#define NUM_ARG_REGS 5
  10402. +
  10403. +/*
  10404. +If defined, the order in which arguments are loaded into their
  10405. +respective argument registers is reversed so that the last
  10406. +argument is loaded first. This macro only affects arguments
  10407. +passed in registers.
  10408. +*/
  10409. +/* #define LOAD_ARGS_REVERSED */
  10410. +
  10411. +/** How Scalar Function Values Are Returned **/
  10412. +
  10413. +/* AVR32 is using r12 as return register. */
  10414. +#define RET_REGISTER (15 - 12)
  10415. +
  10416. +/*
  10417. +A C expression to create an RTX representing the place where a library
  10418. +function returns a value of mode MODE. If the precise function
  10419. +being called is known, FUNC is a tree node
  10420. +(FUNCTION_DECL) for it; otherwise, func is a null
  10421. +pointer. This makes it possible to use a different value-returning
  10422. +convention for specific functions when all their calls are
  10423. +known.
  10424. +
  10425. +Note that "library function" in this context means a compiler
  10426. +support routine, used to perform arithmetic, whose name is known
  10427. +specially by the compiler and was not mentioned in the C code being
  10428. +compiled.
  10429. +
  10430. +The definition of LIBCALL_VALUE need not be concerned with aggregate
  10431. +data types, because none of the library functions returns such types.
  10432. +*/
  10433. +#define LIBCALL_VALUE(MODE) avr32_libcall_value(MODE)
  10434. +
  10435. +/*
  10436. +A C expression that is nonzero if REGNO is the number of a hard
  10437. +register in which the values of called function may come back.
  10438. +
  10439. +A register whose use for returning values is limited to serving as the
  10440. +second of a pair (for a value of type double, say) need not be
  10441. +recognized by this macro. So for most machines, this definition
  10442. +suffices:
  10443. + #define FUNCTION_VALUE_REGNO_P(N) ((N) == 0)
  10444. +
  10445. +If the machine has register windows, so that the caller and the called
  10446. +function use different registers for the return value, this macro
  10447. +should recognize only the caller's register numbers.
  10448. +*/
  10449. +/*
  10450. + When returning a value of mode DImode, r11:r10 is used, else r12 is used.
  10451. +*/
  10452. +#define FUNCTION_VALUE_REGNO_P(REGNO) ((REGNO) == RET_REGISTER \
  10453. + || (REGNO) == INTERNAL_REGNUM(11))
  10454. +
  10455. +
  10456. +/** How Large Values Are Returned **/
  10457. +
  10458. +
  10459. +/*
  10460. +Define this macro to be 1 if all structure and union return values must be
  10461. +in memory. Since this results in slower code, this should be defined
  10462. +only if needed for compatibility with other compilers or with an ABI.
  10463. +If you define this macro to be 0, then the conventions used for structure
  10464. +and union return values are decided by the RETURN_IN_MEMORY macro.
  10465. +
  10466. +If not defined, this defaults to the value 1.
  10467. +*/
  10468. +#define DEFAULT_PCC_STRUCT_RETURN 0
  10469. +
  10470. +
  10471. +
  10472. +
  10473. +/** Generating Code for Profiling **/
  10474. +
  10475. +/*
  10476. +A C statement or compound statement to output to FILE some
  10477. +assembler code to call the profiling subroutine mcount.
  10478. +
  10479. +The details of how mcount expects to be called are determined by
  10480. +your operating system environment, not by GCC. To figure them out,
  10481. +compile a small program for profiling using the system's installed C
  10482. +compiler and look at the assembler code that results.
  10483. +
  10484. +Older implementations of mcount expect the address of a counter
  10485. +variable to be loaded into some register. The name of this variable is
  10486. +'LP' followed by the number LABELNO, so you would generate
  10487. +the name using 'LP%d' in a fprintf.
  10488. +*/
  10489. +/* ToDo: fixme */
  10490. +#ifndef FUNCTION_PROFILER
  10491. +#define FUNCTION_PROFILER(FILE, LABELNO) \
  10492. + fprintf((FILE), "/* profiler %d */", (LABELNO))
  10493. +#endif
  10494. +
  10495. +
  10496. +/*****************************************************************************
  10497. + * Trampolines for Nested Functions *
  10498. + *****************************************************************************/
  10499. +
  10500. +/*
  10501. +A C statement to output, on the stream FILE, assembler code for a
  10502. +block of data that contains the constant parts of a trampoline. This
  10503. +code should not include a label - the label is taken care of
  10504. +automatically.
  10505. +
  10506. +If you do not define this macro, it means no template is needed
  10507. +for the target. Do not define this macro on systems where the block move
  10508. +code to copy the trampoline into place would be larger than the code
  10509. +to generate it on the spot.
  10510. +*/
  10511. +/* ToDo: correct? */
  10512. +#define TRAMPOLINE_TEMPLATE(FILE) avr32_trampoline_template(FILE);
  10513. +
  10514. +
  10515. +/*
  10516. +A C expression for the size in bytes of the trampoline, as an integer.
  10517. +*/
  10518. +/* ToDo: fixme */
  10519. +#define TRAMPOLINE_SIZE 0x0C
  10520. +
  10521. +/*
  10522. +Alignment required for trampolines, in bits.
  10523. +
  10524. +If you don't define this macro, the value of BIGGEST_ALIGNMENT
  10525. +is used for aligning trampolines.
  10526. +*/
  10527. +#define TRAMPOLINE_ALIGNMENT 16
  10528. +
  10529. +/*
  10530. +A C statement to initialize the variable parts of a trampoline.
  10531. +ADDR is an RTX for the address of the trampoline; FNADDR is
  10532. +an RTX for the address of the nested function; STATIC_CHAIN is an
  10533. +RTX for the static chain value that should be passed to the function
  10534. +when it is called.
  10535. +*/
  10536. +#define INITIALIZE_TRAMPOLINE(ADDR, FNADDR, STATIC_CHAIN) \
  10537. + avr32_initialize_trampoline(ADDR, FNADDR, STATIC_CHAIN)
  10538. +
  10539. +
  10540. +/******************************************************************************
  10541. + * Implicit Calls to Library Routines
  10542. + *****************************************************************************/
  10543. +
  10544. +/* Tail calling. */
  10545. +
  10546. +/* A C expression that evaluates to true if it is ok to perform a sibling
  10547. + call to DECL. */
  10548. +#define FUNCTION_OK_FOR_SIBCALL(DECL) 0
  10549. +
  10550. +#define OVERRIDE_OPTIONS avr32_override_options ()
  10551. +
  10552. +#define OPTIMIZATION_OPTIONS(LEVEL, SIZE) avr32_optimization_options (LEVEL, SIZE)
  10553. +
  10554. +/******************************************************************************
  10555. + * Addressing Modes
  10556. + *****************************************************************************/
  10557. +
  10558. +/*
  10559. +A C expression that is nonzero if the machine supports pre-increment,
  10560. +pre-decrement, post-increment, or post-decrement addressing respectively.
  10561. +*/
  10562. +/*
  10563. + AVR32 supports Rp++ and --Rp
  10564. +*/
  10565. +#define HAVE_PRE_INCREMENT 0
  10566. +#define HAVE_PRE_DECREMENT 1
  10567. +#define HAVE_POST_INCREMENT 1
  10568. +#define HAVE_POST_DECREMENT 0
  10569. +
  10570. +/*
  10571. +A C expression that is nonzero if the machine supports pre- or
  10572. +post-address side-effect generation involving constants other than
  10573. +the size of the memory operand.
  10574. +*/
  10575. +#define HAVE_PRE_MODIFY_DISP 0
  10576. +#define HAVE_POST_MODIFY_DISP 0
  10577. +
  10578. +/*
  10579. +A C expression that is nonzero if the machine supports pre- or
  10580. +post-address side-effect generation involving a register displacement.
  10581. +*/
  10582. +#define HAVE_PRE_MODIFY_REG 0
  10583. +#define HAVE_POST_MODIFY_REG 0
  10584. +
  10585. +/*
  10586. +A C expression that is 1 if the RTX X is a constant which
  10587. +is a valid address. On most machines, this can be defined as
  10588. +CONSTANT_P (X), but a few machines are more restrictive
  10589. +in which constant addresses are supported.
  10590. +
  10591. +CONSTANT_P accepts integer-values expressions whose values are
  10592. +not explicitly known, such as symbol_ref, label_ref, and
  10593. +high expressions and const arithmetic expressions, in
  10594. +addition to const_int and const_double expressions.
  10595. +*/
  10596. +#define CONSTANT_ADDRESS_P(X) CONSTANT_P(X)
  10597. +
  10598. +/*
  10599. +A number, the maximum number of registers that can appear in a valid
  10600. +memory address. Note that it is up to you to specify a value equal to
  10601. +the maximum number that GO_IF_LEGITIMATE_ADDRESS would ever
  10602. +accept.
  10603. +*/
  10604. +#define MAX_REGS_PER_ADDRESS 2
  10605. +
  10606. +/*
  10607. +A C compound statement with a conditional goto LABEL;
  10608. +executed if X (an RTX) is a legitimate memory address on the
  10609. +target machine for a memory operand of mode MODE.
  10610. +
  10611. +It usually pays to define several simpler macros to serve as
  10612. +subroutines for this one. Otherwise it may be too complicated to
  10613. +understand.
  10614. +
  10615. +This macro must exist in two variants: a strict variant and a
  10616. +non-strict one. The strict variant is used in the reload pass. It
  10617. +must be defined so that any pseudo-register that has not been
  10618. +allocated a hard register is considered a memory reference. In
  10619. +contexts where some kind of register is required, a pseudo-register
  10620. +with no hard register must be rejected.
  10621. +
  10622. +The non-strict variant is used in other passes. It must be defined to
  10623. +accept all pseudo-registers in every context where some kind of
  10624. +register is required.
  10625. +
  10626. +Compiler source files that want to use the strict variant of this
  10627. +macro define the macro REG_OK_STRICT. You should use an
  10628. +#ifdef REG_OK_STRICT conditional to define the strict variant
  10629. +in that case and the non-strict variant otherwise.
  10630. +
  10631. +Subroutines to check for acceptable registers for various purposes (one
  10632. +for base registers, one for index registers, and so on) are typically
  10633. +among the subroutines used to define GO_IF_LEGITIMATE_ADDRESS.
  10634. +Then only these subroutine macros need have two variants; the higher
  10635. +levels of macros may be the same whether strict or not.
  10636. +
  10637. +Normally, constant addresses which are the sum of a symbol_ref
  10638. +and an integer are stored inside a const RTX to mark them as
  10639. +constant. Therefore, there is no need to recognize such sums
  10640. +specifically as legitimate addresses. Normally you would simply
  10641. +recognize any const as legitimate.
  10642. +
  10643. +Usually PRINT_OPERAND_ADDRESS is not prepared to handle constant
  10644. +sums that are not marked with const. It assumes that a naked
  10645. +plus indicates indexing. If so, then you must reject such
  10646. +naked constant sums as illegitimate addresses, so that none of them will
  10647. +be given to PRINT_OPERAND_ADDRESS.
  10648. +
  10649. +On some machines, whether a symbolic address is legitimate depends on
  10650. +the section that the address refers to. On these machines, define the
  10651. +macro ENCODE_SECTION_INFO to store the information into the
  10652. +symbol_ref, and then check for it here. When you see a
  10653. +const, you will have to look inside it to find the
  10654. +symbol_ref in order to determine the section.
  10655. +
  10656. +The best way to modify the name string is by adding text to the
  10657. +beginning, with suitable punctuation to prevent any ambiguity. Allocate
  10658. +the new name in saveable_obstack. You will have to modify
  10659. +ASM_OUTPUT_LABELREF to remove and decode the added text and
  10660. +output the name accordingly, and define STRIP_NAME_ENCODING to
  10661. +access the original name string.
  10662. +
  10663. +You can check the information stored here into the symbol_ref in
  10664. +the definitions of the macros GO_IF_LEGITIMATE_ADDRESS and
  10665. +PRINT_OPERAND_ADDRESS.
  10666. +*/
  10667. +#ifdef REG_OK_STRICT
  10668. +# define GO_IF_LEGITIMATE_ADDRESS(MODE, X, LABEL) \
  10669. + do \
  10670. + { \
  10671. + if (avr32_legitimate_address(MODE, X, 1)) \
  10672. + goto LABEL; \
  10673. + } \
  10674. + while (0)
  10675. +#else
  10676. +# define GO_IF_LEGITIMATE_ADDRESS(MODE, X, LABEL) \
  10677. + do \
  10678. + { \
  10679. + if (avr32_legitimate_address(MODE, X, 0)) \
  10680. + goto LABEL; \
  10681. + } \
  10682. + while (0)
  10683. +#endif
  10684. +
  10685. +
  10686. +
  10687. +/*
  10688. +A C compound statement that attempts to replace X with a valid
  10689. +memory address for an operand of mode MODE. WIN will be a
  10690. +C statement label elsewhere in the code; the macro definition may use
  10691. +
  10692. + GO_IF_LEGITIMATE_ADDRESS (MODE, X, WIN);
  10693. +
  10694. +to avoid further processing if the address has become legitimate.
  10695. +
  10696. +X will always be the result of a call to break_out_memory_refs,
  10697. +and OLDX will be the operand that was given to that function to produce
  10698. +X.
  10699. +
  10700. +The code generated by this macro should not alter the substructure of
  10701. +X. If it transforms X into a more legitimate form, it
  10702. +should assign X (which will always be a C variable) a new value.
  10703. +
  10704. +It is not necessary for this macro to come up with a legitimate
  10705. +address. The compiler has standard ways of doing so in all cases. In
  10706. +fact, it is safe for this macro to do nothing. But often a
  10707. +machine-dependent strategy can generate better code.
  10708. +*/
  10709. +#define LEGITIMIZE_ADDRESS(X, OLDX, MODE, WIN) \
  10710. + do \
  10711. + { \
  10712. + if (GET_CODE(X) == PLUS \
  10713. + && GET_CODE(XEXP(X, 0)) == REG \
  10714. + && GET_CODE(XEXP(X, 1)) == CONST_INT \
  10715. + && !CONST_OK_FOR_CONSTRAINT_P(INTVAL(XEXP(X, 1)), \
  10716. + 'K', "Ks16")) \
  10717. + { \
  10718. + rtx index = force_reg(SImode, XEXP(X, 1)); \
  10719. + X = gen_rtx_PLUS( SImode, XEXP(X, 0), index); \
  10720. + } \
  10721. + GO_IF_LEGITIMATE_ADDRESS(MODE, X, WIN); \
  10722. + } \
  10723. + while(0)
  10724. +
  10725. +
  10726. +/*
  10727. +A C statement or compound statement with a conditional
  10728. +goto LABEL; executed if memory address X (an RTX) can have
  10729. +different meanings depending on the machine mode of the memory
  10730. +reference it is used for or if the address is valid for some modes
  10731. +but not others.
  10732. +
  10733. +Autoincrement and autodecrement addresses typically have mode-dependent
  10734. +effects because the amount of the increment or decrement is the size
  10735. +of the operand being addressed. Some machines have other mode-dependent
  10736. +addresses. Many RISC machines have no mode-dependent addresses.
  10737. +
  10738. +You may assume that ADDR is a valid address for the machine.
  10739. +*/
  10740. +#define GO_IF_MODE_DEPENDENT_ADDRESS(ADDR, LABEL) \
  10741. + do \
  10742. + { \
  10743. + if (GET_CODE (ADDR) == POST_INC \
  10744. + || GET_CODE (ADDR) == PRE_DEC) \
  10745. + goto LABEL; \
  10746. + } \
  10747. + while (0)
  10748. +
  10749. +/*
  10750. +A C expression that is nonzero if X is a legitimate constant for
  10751. +an immediate operand on the target machine. You can assume that
  10752. +X satisfies CONSTANT_P, so you need not check this. In fact,
  10753. +'1' is a suitable definition for this macro on machines where
  10754. +anything CONSTANT_P is valid.
  10755. +*/
  10756. +#define LEGITIMATE_CONSTANT_P(X) avr32_legitimate_constant_p(X)
  10757. +
  10758. +
  10759. +/******************************************************************************
  10760. + * Condition Code Status
  10761. + *****************************************************************************/
  10762. +
  10763. +/*
  10764. +C code for a data type which is used for declaring the mdep
  10765. +component of cc_status. It defaults to int.
  10766. +
  10767. +This macro is not used on machines that do not use cc0.
  10768. +*/
  10769. +
  10770. +typedef struct
  10771. +{
  10772. + int flags;
  10773. + rtx value;
  10774. + int cond_exec_cmp_clobbered;
  10775. +} avr32_status_reg;
  10776. +
  10777. +
  10778. +#define CC_STATUS_MDEP avr32_status_reg
  10779. +
  10780. +/*
  10781. +A C expression to initialize the mdep field to "empty".
  10782. +The default definition does nothing, since most machines don't use
  10783. +the field anyway. If you want to use the field, you should probably
  10784. +define this macro to initialize it.
  10785. +
  10786. +This macro is not used on machines that do not use cc0.
  10787. +*/
  10788. +
  10789. +#define CC_STATUS_MDEP_INIT \
  10790. + (cc_status.mdep.flags = CC_NONE , cc_status.mdep.cond_exec_cmp_clobbered = 0, cc_status.mdep.value = 0)
  10791. +
  10792. +/*
  10793. +A C compound statement to set the components of cc_status
  10794. +appropriately for an insn INSN whose body is EXP. It is
  10795. +this macro's responsibility to recognize insns that set the condition
  10796. +code as a byproduct of other activity as well as those that explicitly
  10797. +set (cc0).
  10798. +
  10799. +This macro is not used on machines that do not use cc0.
  10800. +
  10801. +If there are insns that do not set the condition code but do alter
  10802. +other machine registers, this macro must check to see whether they
  10803. +invalidate the expressions that the condition code is recorded as
  10804. +reflecting. For example, on the 68000, insns that store in address
  10805. +registers do not set the condition code, which means that usually
  10806. +NOTICE_UPDATE_CC can leave cc_status unaltered for such
  10807. +insns. But suppose that the previous insn set the condition code
  10808. +based on location 'a4@@(102)' and the current insn stores a new
  10809. +value in 'a4'. Although the condition code is not changed by
  10810. +this, it will no longer be true that it reflects the contents of
  10811. +'a4@@(102)'. Therefore, NOTICE_UPDATE_CC must alter
  10812. +cc_status in this case to say that nothing is known about the
  10813. +condition code value.
  10814. +
  10815. +The definition of NOTICE_UPDATE_CC must be prepared to deal
  10816. +with the results of peephole optimization: insns whose patterns are
  10817. +parallel RTXs containing various reg, mem or
  10818. +constants which are just the operands. The RTL structure of these
  10819. +insns is not sufficient to indicate what the insns actually do. What
  10820. +NOTICE_UPDATE_CC should do when it sees one is just to run
  10821. +CC_STATUS_INIT.
  10822. +
  10823. +A possible definition of NOTICE_UPDATE_CC is to call a function
  10824. +that looks at an attribute (see Insn Attributes) named, for example,
  10825. +'cc'. This avoids having detailed information about patterns in
  10826. +two places, the 'md' file and in NOTICE_UPDATE_CC.
  10827. +*/
  10828. +
  10829. +#define NOTICE_UPDATE_CC(EXP, INSN) avr32_notice_update_cc(EXP, INSN)
  10830. +
  10831. +
  10832. +
  10833. +
  10834. +/******************************************************************************
  10835. + * Describing Relative Costs of Operations
  10836. + *****************************************************************************/
  10837. +
  10838. +
  10839. +
  10840. +/*
  10841. +A C expression for the cost of moving data of mode MODE from a
  10842. +register in class FROM to one in class TO. The classes are
  10843. +expressed using the enumeration values such as GENERAL_REGS. A
  10844. +value of 2 is the default; other values are interpreted relative to
  10845. +that.
  10846. +
  10847. +It is not required that the cost always equal 2 when FROM is the
  10848. +same as TO; on some machines it is expensive to move between
  10849. +registers if they are not general registers.
  10850. +
  10851. +If reload sees an insn consisting of a single set between two
  10852. +hard registers, and if REGISTER_MOVE_COST applied to their
  10853. +classes returns a value of 2, reload does not check to ensure that the
  10854. +constraints of the insn are met. Setting a cost of other than 2 will
  10855. +allow reload to verify that the constraints are met. You should do this
  10856. +if the movm pattern's constraints do not allow such copying.
  10857. +*/
  10858. +#define REGISTER_MOVE_COST(MODE, FROM, TO) \
  10859. + ((GET_MODE_SIZE(MODE) <= 4) ? 2: \
  10860. + (GET_MODE_SIZE(MODE) <= 8) ? 3: \
  10861. + 4)
  10862. +
  10863. +/*
  10864. +A C expression for the cost of moving data of mode MODE between a
  10865. +register of class CLASS and memory; IN is zero if the value
  10866. +is to be written to memory, nonzero if it is to be read in. This cost
  10867. +is relative to those in REGISTER_MOVE_COST. If moving between
  10868. +registers and memory is more expensive than between two registers, you
  10869. +should define this macro to express the relative cost.
  10870. +
  10871. +If you do not define this macro, GCC uses a default cost of 4 plus
  10872. +the cost of copying via a secondary reload register, if one is
  10873. +needed. If your machine requires a secondary reload register to copy
  10874. +between memory and a register of CLASS but the reload mechanism is
  10875. +more complex than copying via an intermediate, define this macro to
  10876. +reflect the actual cost of the move.
  10877. +
  10878. +GCC defines the function memory_move_secondary_cost if
  10879. +secondary reloads are needed. It computes the costs due to copying via
  10880. +a secondary register. If your machine copies from memory using a
  10881. +secondary register in the conventional way but the default base value of
  10882. +4 is not correct for your machine, define this macro to add some other
  10883. +value to the result of that function. The arguments to that function
  10884. +are the same as to this macro.
  10885. +*/
  10886. +/*
  10887. + Memory moves are costly
  10888. +*/
  10889. +#define MEMORY_MOVE_COST(MODE, CLASS, IN) \
  10890. + (((IN) ? ((GET_MODE_SIZE(MODE) < 4) ? 4 : \
  10891. + (GET_MODE_SIZE(MODE) > 8) ? 6 : \
  10892. + 3) \
  10893. + : ((GET_MODE_SIZE(MODE) > 8) ? 6 : 3)))
  10894. +
  10895. +/*
  10896. +A C expression for the cost of a branch instruction. A value of 1 is
  10897. +the default; other values are interpreted relative to that.
  10898. +*/
  10899. + /* Try to use conditionals as much as possible */
  10900. +#define BRANCH_COST(speed_p, predictable_p) (TARGET_BRANCH_PRED ? 3 : 4)
  10901. +
  10902. +/*A C expression for the maximum number of instructions to execute via conditional
  10903. + execution instructions instead of a branch. A value of BRANCH_COST+1 is the default
  10904. + if the machine does not use cc0, and 1 if it does use cc0.*/
  10905. +#define MAX_CONDITIONAL_EXECUTE 4
  10906. +
  10907. +/*
  10908. +Define this macro as a C expression which is nonzero if accessing less
  10909. +than a word of memory (i.e.: a char or a short) is no
  10910. +faster than accessing a word of memory, i.e., if such access
  10911. +require more than one instruction or if there is no difference in cost
  10912. +between byte and (aligned) word loads.
  10913. +
  10914. +When this macro is not defined, the compiler will access a field by
  10915. +finding the smallest containing object; when it is defined, a fullword
  10916. +load will be used if alignment permits. Unless bytes accesses are
  10917. +faster than word accesses, using word accesses is preferable since it
  10918. +may eliminate subsequent memory access if subsequent accesses occur to
  10919. +other fields in the same word of the structure, but to different bytes.
  10920. +*/
  10921. +#define SLOW_BYTE_ACCESS 1
  10922. +
  10923. +
  10924. +/*
  10925. +Define this macro if it is as good or better to call a constant
  10926. +function address than to call an address kept in a register.
  10927. +*/
  10928. +#define NO_FUNCTION_CSE
  10929. +
  10930. +
  10931. +/******************************************************************************
  10932. + * Adjusting the Instruction Scheduler
  10933. + *****************************************************************************/
  10934. +
  10935. +/*****************************************************************************
  10936. + * Dividing the Output into Sections (Texts, Data, ...) *
  10937. + *****************************************************************************/
  10938. +
  10939. +/*
  10940. +A C expression whose value is a string, including spacing, containing the
  10941. +assembler operation that should precede instructions and read-only data.
  10942. +Normally "\t.text" is right.
  10943. +*/
  10944. +#define TEXT_SECTION_ASM_OP "\t.text"
  10945. +/*
  10946. +A C statement that switches to the default section containing instructions.
  10947. +Normally this is not needed, as simply defining TEXT_SECTION_ASM_OP
  10948. +is enough. The MIPS port uses this to sort all functions after all data
  10949. +declarations.
  10950. +*/
  10951. +/* #define TEXT_SECTION */
  10952. +
  10953. +/*
  10954. +A C expression whose value is a string, including spacing, containing the
  10955. +assembler operation to identify the following data as writable initialized
  10956. +data. Normally "\t.data" is right.
  10957. +*/
  10958. +#define DATA_SECTION_ASM_OP "\t.data"
  10959. +
  10960. +/*
  10961. +If defined, a C expression whose value is a string, including spacing,
  10962. +containing the assembler operation to identify the following data as
  10963. +shared data. If not defined, DATA_SECTION_ASM_OP will be used.
  10964. +*/
  10965. +
  10966. +/*
  10967. +A C expression whose value is a string, including spacing, containing
  10968. +the assembler operation to identify the following data as read-only
  10969. +initialized data.
  10970. +*/
  10971. +#undef READONLY_DATA_SECTION_ASM_OP
  10972. +#define READONLY_DATA_SECTION_ASM_OP \
  10973. + ((TARGET_USE_RODATA_SECTION) ? \
  10974. + "\t.section\t.rodata" : \
  10975. + TEXT_SECTION_ASM_OP )
  10976. +
  10977. +
  10978. +/*
  10979. +If defined, a C expression whose value is a string, including spacing,
  10980. +containing the assembler operation to identify the following data as
  10981. +uninitialized global data. If not defined, and neither
  10982. +ASM_OUTPUT_BSS nor ASM_OUTPUT_ALIGNED_BSS are defined,
  10983. +uninitialized global data will be output in the data section if
  10984. +-fno-common is passed, otherwise ASM_OUTPUT_COMMON will be
  10985. +used.
  10986. +*/
  10987. +#define BSS_SECTION_ASM_OP "\t.section\t.bss"
  10988. +
  10989. +/*
  10990. +If defined, a C expression whose value is a string, including spacing,
  10991. +containing the assembler operation to identify the following data as
  10992. +uninitialized global shared data. If not defined, and
  10993. +BSS_SECTION_ASM_OP is, the latter will be used.
  10994. +*/
  10995. +/*#define SHARED_BSS_SECTION_ASM_OP "\trseg\tshared_bbs_section:data:noroot(0)\n"*/
  10996. +/*
  10997. +If defined, a C expression whose value is a string, including spacing,
  10998. +containing the assembler operation to identify the following data as
  10999. +initialization code. If not defined, GCC will assume such a section does
  11000. +not exist.
  11001. +*/
  11002. +#undef INIT_SECTION_ASM_OP
  11003. +#define INIT_SECTION_ASM_OP "\t.section\t.init"
  11004. +
  11005. +/*
  11006. +If defined, a C expression whose value is a string, including spacing,
  11007. +containing the assembler operation to identify the following data as
  11008. +finalization code. If not defined, GCC will assume such a section does
  11009. +not exist.
  11010. +*/
  11011. +#undef FINI_SECTION_ASM_OP
  11012. +#define FINI_SECTION_ASM_OP "\t.section\t.fini"
  11013. +
  11014. +/*
  11015. +If defined, an ASM statement that switches to a different section
  11016. +via SECTION_OP, calls FUNCTION, and switches back to
  11017. +the text section. This is used in crtstuff.c if
  11018. +INIT_SECTION_ASM_OP or FINI_SECTION_ASM_OP is defined, for calls
  11019. +to initialization and finalization functions from the init and fini
  11020. +sections. By default, this macro uses a simple function call. Some
  11021. +ports need hand-crafted assembly code to avoid dependencies on
  11022. +registers initialized in the function prologue or to ensure that
  11023. +constant pools don't end up too far away in the text section.
  11024. +*/
  11025. +#define CRT_CALL_STATIC_FUNCTION(SECTION_OP, FUNC) \
  11026. + asm ( SECTION_OP "\n" \
  11027. + "mcall r6[" USER_LABEL_PREFIX #FUNC "@got]\n" \
  11028. + TEXT_SECTION_ASM_OP);
  11029. +
  11030. +
  11031. +/*
  11032. +Define this macro to be an expression with a nonzero value if jump
  11033. +tables (for tablejump insns) should be output in the text
  11034. +section, along with the assembler instructions. Otherwise, the
  11035. +readonly data section is used.
  11036. +
  11037. +This macro is irrelevant if there is no separate readonly data section.
  11038. +*/
  11039. +/* Put jump tables in text section if we have caches. Otherwise assume that
  11040. + loading data from code memory is slow. */
  11041. +#define JUMP_TABLES_IN_TEXT_SECTION \
  11042. + (TARGET_CACHES ? 1 : 0)
  11043. +
  11044. +
  11045. +/******************************************************************************
  11046. + * Position Independent Code (PIC)
  11047. + *****************************************************************************/
  11048. +
  11049. +#ifndef AVR32_ALWAYS_PIC
  11050. +#define AVR32_ALWAYS_PIC 0
  11051. +#endif
  11052. +
  11053. +/* GOT is set to r6 */
  11054. +#define PIC_OFFSET_TABLE_REGNUM INTERNAL_REGNUM(6)
  11055. +
  11056. +/*
  11057. +A C expression that is nonzero if X is a legitimate immediate
  11058. +operand on the target machine when generating position independent code.
  11059. +You can assume that X satisfies CONSTANT_P, so you need not
  11060. +check this. You can also assume flag_pic is true, so you need not
  11061. +check it either. You need not define this macro if all constants
  11062. +(including SYMBOL_REF) can be immediate operands when generating
  11063. +position independent code.
  11064. +*/
  11065. +/* We can't directly access anything that contains a symbol,
  11066. + nor can we indirect via the constant pool. */
  11067. +#define LEGITIMATE_PIC_OPERAND_P(X) avr32_legitimate_pic_operand_p(X)
  11068. +
  11069. +
  11070. +/* We need to know when we are making a constant pool; this determines
  11071. + whether data needs to be in the GOT or can be referenced via a GOT
  11072. + offset. */
  11073. +extern int making_const_table;
  11074. +
  11075. +/******************************************************************************
  11076. + * Defining the Output Assembler Language
  11077. + *****************************************************************************/
  11078. +
  11079. +
  11080. +/*
  11081. +A C string constant describing how to begin a comment in the target
  11082. +assembler language. The compiler assumes that the comment will end at
  11083. +the end of the line.
  11084. +*/
  11085. +#define ASM_COMMENT_START "# "
  11086. +
  11087. +/*
  11088. +A C string constant for text to be output before each asm
  11089. +statement or group of consecutive ones. Normally this is
  11090. +"#APP", which is a comment that has no effect on most
  11091. +assemblers but tells the GNU assembler that it must check the lines
  11092. +that follow for all valid assembler constructs.
  11093. +*/
  11094. +#undef ASM_APP_ON
  11095. +#define ASM_APP_ON "#APP\n"
  11096. +
  11097. +/*
  11098. +A C string constant for text to be output after each asm
  11099. +statement or group of consecutive ones. Normally this is
  11100. +"#NO_APP", which tells the GNU assembler to resume making the
  11101. +time-saving assumptions that are valid for ordinary compiler output.
  11102. +*/
  11103. +#undef ASM_APP_OFF
  11104. +#define ASM_APP_OFF "#NO_APP\n"
  11105. +
  11106. +
  11107. +
  11108. +#define FILE_ASM_OP "\t.file\n"
  11109. +#define IDENT_ASM_OP "\t.ident\t"
  11110. +#define SET_ASM_OP "\t.set\t"
  11111. +
  11112. +
  11113. +/*
  11114. + * Output assembly directives to switch to section name. The section
  11115. + * should have attributes as specified by flags, which is a bit mask
  11116. + * of the SECTION_* flags defined in 'output.h'. If align is nonzero,
  11117. + * it contains an alignment in bytes to be used for the section,
  11118. + * otherwise some target default should be used. Only targets that
  11119. + * must specify an alignment within the section directive need pay
  11120. + * attention to align -- we will still use ASM_OUTPUT_ALIGN.
  11121. + *
  11122. + * NOTE: This one must not be moved to avr32.c
  11123. + */
  11124. +#undef TARGET_ASM_NAMED_SECTION
  11125. +#define TARGET_ASM_NAMED_SECTION default_elf_asm_named_section
  11126. +
  11127. +
  11128. +/*
  11129. +You may define this macro as a C expression. You should define the
  11130. +expression to have a nonzero value if GCC should output the constant
  11131. +pool for a function before the code for the function, or a zero value if
  11132. +GCC should output the constant pool after the function. If you do
  11133. +not define this macro, the usual case, GCC will output the constant
  11134. +pool before the function.
  11135. +*/
  11136. +#define CONSTANT_POOL_BEFORE_FUNCTION 0
  11137. +
  11138. +
  11139. +/*
  11140. +Define this macro as a C expression which is nonzero if the constant
  11141. +EXP, of type tree, should be output after the code for a
  11142. +function. The compiler will normally output all constants before the
  11143. +function; you need not define this macro if this is OK.
  11144. +*/
  11145. +#define CONSTANT_AFTER_FUNCTION_P(EXP) 1
  11146. +
  11147. +
  11148. +/*
  11149. +Define this macro as a C expression which is nonzero if C is used
  11150. +as a logical line separator by the assembler. STR points to the
  11151. +position in the string where C was found; this can be used if a
  11152. +line separator uses multiple characters.
  11153. +
  11154. +If you do not define this macro, the default is that only
  11155. +the character ';' is treated as a logical line separator.
  11156. +*/
  11157. +#define IS_ASM_LOGICAL_LINE_SEPARATOR(C,STR) (((C) == '\n') || ((C) == ';'))
  11158. +
  11159. +
  11160. +/** Output of Uninitialized Variables **/
  11161. +
  11162. +/*
  11163. +A C statement (sans semicolon) to output to the stdio stream
  11164. +STREAM the assembler definition of a common-label named
  11165. +NAME whose size is SIZE bytes. The variable ROUNDED
  11166. +is the size rounded up to whatever alignment the caller wants.
  11167. +
  11168. +Use the expression assemble_name(STREAM, NAME) to
  11169. +output the name itself; before and after that, output the additional
  11170. +assembler syntax for defining the name, and a newline.
  11171. +
  11172. +This macro controls how the assembler definitions of uninitialized
  11173. +common global variables are output.
  11174. +*/
  11175. +/*
  11176. +#define ASM_OUTPUT_COMMON(STREAM, NAME, SIZE, ROUNDED) \
  11177. + avr32_asm_output_common(STREAM, NAME, SIZE, ROUNDED)
  11178. +*/
  11179. +
  11180. +#define ASM_OUTPUT_COMMON(FILE, NAME, SIZE, ROUNDED) \
  11181. + do \
  11182. + { \
  11183. + fputs ("\t.comm ", (FILE)); \
  11184. + assemble_name ((FILE), (NAME)); \
  11185. + fprintf ((FILE), ",%d\n", (SIZE)); \
  11186. + } \
  11187. + while (0)
  11188. +
  11189. +/*
  11190. + * Like ASM_OUTPUT_BSS except takes the required alignment as a
  11191. + * separate, explicit argument. If you define this macro, it is used
  11192. + * in place of ASM_OUTPUT_BSS, and gives you more flexibility in
  11193. + * handling the required alignment of the variable. The alignment is
  11194. + * specified as the number of bits.
  11195. + *
  11196. + * Try to use function asm_output_aligned_bss defined in file varasm.c
  11197. + * when defining this macro.
  11198. + */
  11199. +#define ASM_OUTPUT_ALIGNED_BSS(STREAM, DECL, NAME, SIZE, ALIGNMENT) \
  11200. + asm_output_aligned_bss (STREAM, DECL, NAME, SIZE, ALIGNMENT)
  11201. +
  11202. +/*
  11203. +A C statement (sans semicolon) to output to the stdio stream
  11204. +STREAM the assembler definition of a local-common-label named
  11205. +NAME whose size is SIZE bytes. The variable ROUNDED
  11206. +is the size rounded up to whatever alignment the caller wants.
  11207. +
  11208. +Use the expression assemble_name(STREAM, NAME) to
  11209. +output the name itself; before and after that, output the additional
  11210. +assembler syntax for defining the name, and a newline.
  11211. +
  11212. +This macro controls how the assembler definitions of uninitialized
  11213. +static variables are output.
  11214. +*/
  11215. +#define ASM_OUTPUT_LOCAL(FILE, NAME, SIZE, ROUNDED) \
  11216. + do \
  11217. + { \
  11218. + fputs ("\t.lcomm ", (FILE)); \
  11219. + assemble_name ((FILE), (NAME)); \
  11220. + fprintf ((FILE), ",%d, %d\n", (SIZE), 2); \
  11221. + } \
  11222. + while (0)
  11223. +
  11224. +
  11225. +/*
  11226. +A C statement (sans semicolon) to output to the stdio stream
  11227. +STREAM the assembler definition of a label named NAME.
  11228. +Use the expression assemble_name(STREAM, NAME) to
  11229. +output the name itself; before and after that, output the additional
  11230. +assembler syntax for defining the name, and a newline.
  11231. +*/
  11232. +#define ASM_OUTPUT_LABEL(STREAM, NAME) avr32_asm_output_label(STREAM, NAME)
  11233. +
  11234. +/* A C string containing the appropriate assembler directive to
  11235. + * specify the size of a symbol, without any arguments. On systems
  11236. + * that use ELF, the default (in 'config/elfos.h') is '"\t.size\t"';
  11237. + * on other systems, the default is not to define this macro.
  11238. + *
  11239. + * Define this macro only if it is correct to use the default
  11240. + * definitions of ASM_ OUTPUT_SIZE_DIRECTIVE and
  11241. + * ASM_OUTPUT_MEASURED_SIZE for your system. If you need your own
  11242. + * custom definitions of those macros, or if you do not need explicit
  11243. + * symbol sizes at all, do not define this macro.
  11244. + */
  11245. +#define SIZE_ASM_OP "\t.size\t"
  11246. +
  11247. +
  11248. +/*
  11249. +A C statement (sans semicolon) to output to the stdio stream
  11250. +STREAM some commands that will make the label NAME global;
  11251. +that is, available for reference from other files. Use the expression
  11252. +assemble_name(STREAM, NAME) to output the name
  11253. +itself; before and after that, output the additional assembler syntax
  11254. +for making that name global, and a newline.
  11255. +*/
  11256. +#define GLOBAL_ASM_OP "\t.global\t"
  11257. +
  11258. +
  11259. +
  11260. +/*
  11261. +A C expression which evaluates to true if the target supports weak symbols.
  11262. +
  11263. +If you don't define this macro, defaults.h provides a default
  11264. +definition. If either ASM_WEAKEN_LABEL or ASM_WEAKEN_DECL
  11265. +is defined, the default definition is '1'; otherwise, it is
  11266. +'0'. Define this macro if you want to control weak symbol support
  11267. +with a compiler flag such as -melf.
  11268. +*/
  11269. +#define SUPPORTS_WEAK 1
  11270. +
  11271. +/*
  11272. +A C statement (sans semicolon) to output to the stdio stream
  11273. +STREAM a reference in assembler syntax to a label named
  11274. +NAME. This should add '_' to the front of the name, if that
  11275. +is customary on your operating system, as it is in most Berkeley Unix
  11276. +systems. This macro is used in assemble_name.
  11277. +*/
  11278. +#define ASM_OUTPUT_LABELREF(STREAM, NAME) \
  11279. + avr32_asm_output_labelref(STREAM, NAME)
  11280. +
  11281. +
  11282. +
  11283. +/*
  11284. +A C expression to assign to OUTVAR (which is a variable of type
  11285. +char *) a newly allocated string made from the string
  11286. +NAME and the number NUMBER, with some suitable punctuation
  11287. +added. Use alloca to get space for the string.
  11288. +
  11289. +The string will be used as an argument to ASM_OUTPUT_LABELREF to
  11290. +produce an assembler label for an internal static variable whose name is
  11291. +NAME. Therefore, the string must be such as to result in valid
  11292. +assembler code. The argument NUMBER is different each time this
  11293. +macro is executed; it prevents conflicts between similarly-named
  11294. +internal static variables in different scopes.
  11295. +
  11296. +Ideally this string should not be a valid C identifier, to prevent any
  11297. +conflict with the user's own symbols. Most assemblers allow periods
  11298. +or percent signs in assembler symbols; putting at least one of these
  11299. +between the name and the number will suffice.
  11300. +*/
  11301. +#define ASM_FORMAT_PRIVATE_NAME(OUTVAR, NAME, NUMBER) \
  11302. + do \
  11303. + { \
  11304. + (OUTVAR) = (char *) alloca (strlen ((NAME)) + 10); \
  11305. + sprintf ((OUTVAR), "%s.%d", (NAME), (NUMBER)); \
  11306. + } \
  11307. + while (0)
  11308. +
  11309. +
  11310. +/** Macros Controlling Initialization Routines **/
  11311. +
  11312. +
  11313. +/*
  11314. +If defined, main will not call __main as described above.
  11315. +This macro should be defined for systems that control start-up code
  11316. +on a symbol-by-symbol basis, such as OSF/1, and should not
  11317. +be defined explicitly for systems that support INIT_SECTION_ASM_OP.
  11318. +*/
  11319. +/*
  11320. + __main is not defined when debugging.
  11321. +*/
  11322. +#define HAS_INIT_SECTION
  11323. +
  11324. +
  11325. +/** Output of Assembler Instructions **/
  11326. +
  11327. +/*
  11328. +A C initializer containing the assembler's names for the machine
  11329. +registers, each one as a C string constant. This is what translates
  11330. +register numbers in the compiler into assembler language.
  11331. +*/
  11332. +
  11333. +#define REGISTER_NAMES \
  11334. +{ \
  11335. + "pc", "lr", \
  11336. + "sp", "r12", \
  11337. + "r11", "r10", \
  11338. + "r9", "r8", \
  11339. + "r7", "r6", \
  11340. + "r5", "r4", \
  11341. + "r3", "r2", \
  11342. + "r1", "r0", \
  11343. +}
  11344. +
  11345. +/*
  11346. +A C compound statement to output to stdio stream STREAM the
  11347. +assembler syntax for an instruction operand X. X is an
  11348. +RTL expression.
  11349. +
  11350. +CODE is a value that can be used to specify one of several ways
  11351. +of printing the operand. It is used when identical operands must be
  11352. +printed differently depending on the context. CODE comes from
  11353. +the '%' specification that was used to request printing of the
  11354. +operand. If the specification was just '%digit' then
  11355. +CODE is 0; if the specification was '%ltr digit'
  11356. +then CODE is the ASCII code for ltr.
  11357. +
  11358. +If X is a register, this macro should print the register's name.
  11359. +The names can be found in an array reg_names whose type is
  11360. +char *[]. reg_names is initialized from REGISTER_NAMES.
  11361. +
  11362. +When the machine description has a specification '%punct'
  11363. +(a '%' followed by a punctuation character), this macro is called
  11364. +with a null pointer for X and the punctuation character for
  11365. +CODE.
  11366. +*/
  11367. +#define PRINT_OPERAND(STREAM, X, CODE) avr32_print_operand(STREAM, X, CODE)
  11368. +
  11369. +/* A C statement to be executed just prior to the output of
  11370. + assembler code for INSN, to modify the extracted operands so
  11371. + they will be output differently.
  11372. +
  11373. + Here the argument OPVEC is the vector containing the operands
  11374. + extracted from INSN, and NOPERANDS is the number of elements of
  11375. + the vector which contain meaningful data for this insn.
  11376. + The contents of this vector are what will be used to convert the insn
  11377. + template into assembler code, so you can change the assembler output
  11378. + by changing the contents of the vector. */
  11379. +#define FINAL_PRESCAN_INSN(INSN, OPVEC, NOPERANDS) \
  11380. + avr32_final_prescan_insn ((INSN), (OPVEC), (NOPERANDS))
  11381. +
  11382. +/*
  11383. +A C expression which evaluates to true if CODE is a valid
  11384. +punctuation character for use in the PRINT_OPERAND macro. If
  11385. +PRINT_OPERAND_PUNCT_VALID_P is not defined, it means that no
  11386. +punctuation characters (except for the standard one, '%') are used
  11387. +in this way.
  11388. +*/
  11389. +#define PRINT_OPERAND_PUNCT_VALID_P(CODE) \
  11390. + (((CODE) == '?') \
  11391. + || ((CODE) == '!'))
  11392. +
  11393. +/*
  11394. +A C compound statement to output to stdio stream STREAM the
  11395. +assembler syntax for an instruction operand that is a memory reference
  11396. +whose address is X. X is an RTL expression.
  11397. +
  11398. +On some machines, the syntax for a symbolic address depends on the
  11399. +section that the address refers to. On these machines, define the macro
  11400. +ENCODE_SECTION_INFO to store the information into the
  11401. +symbol_ref, and then check for it here. (see Assembler Format.)
  11402. +*/
  11403. +#define PRINT_OPERAND_ADDRESS(STREAM, X) avr32_print_operand_address(STREAM, X)
  11404. +
  11405. +
  11406. +/** Output of Dispatch Tables **/
  11407. +
  11408. +/*
  11409. + * A C statement to output to the stdio stream stream an assembler
  11410. + * pseudo-instruction to generate a difference between two
  11411. + * labels. value and rel are the numbers of two internal labels. The
  11412. + * definitions of these labels are output using
  11413. + * (*targetm.asm_out.internal_label), and they must be printed in the
  11414. + * same way here. For example,
  11415. + *
  11416. + * fprintf (stream, "\t.word L%d-L%d\n",
  11417. + * value, rel)
  11418. + *
  11419. + * You must provide this macro on machines where the addresses in a
  11420. + * dispatch table are relative to the table's own address. If defined,
  11421. + * GCC will also use this macro on all machines when producing
  11422. + * PIC. body is the body of the ADDR_DIFF_VEC; it is provided so that
  11423. + * the mode and flags can be read.
  11424. + */
  11425. +#define ASM_OUTPUT_ADDR_DIFF_ELT(STREAM, BODY, VALUE, REL) \
  11426. + fprintf(STREAM, "\tbral\t%sL%d\n", LOCAL_LABEL_PREFIX, VALUE)
  11427. +
  11428. +/*
  11429. +This macro should be provided on machines where the addresses
  11430. +in a dispatch table are absolute.
  11431. +
  11432. +The definition should be a C statement to output to the stdio stream
  11433. +STREAM an assembler pseudo-instruction to generate a reference to
  11434. +a label. VALUE is the number of an internal label whose
  11435. +definition is output using ASM_OUTPUT_INTERNAL_LABEL.
  11436. +For example,
  11437. +
  11438. +fprintf(STREAM, "\t.word L%d\n", VALUE)
  11439. +*/
  11440. +
  11441. +#define ASM_OUTPUT_ADDR_VEC_ELT(STREAM, VALUE) \
  11442. + fprintf(STREAM, "\t.long %sL%d\n", LOCAL_LABEL_PREFIX, VALUE)
  11443. +
  11444. +/** Assembler Commands for Exception Regions */
  11445. +
  11446. +/* ToDo: All of this subsection */
  11447. +
  11448. +/** Assembler Commands for Alignment */
  11449. +
  11450. +
  11451. +/*
  11452. +A C statement to output to the stdio stream STREAM an assembler
  11453. +command to advance the location counter to a multiple of 2 to the
  11454. +POWER bytes. POWER will be a C expression of type int.
  11455. +*/
  11456. +#define ASM_OUTPUT_ALIGN(STREAM, POWER) \
  11457. + do \
  11458. + { \
  11459. + if ((POWER) != 0) \
  11460. + fprintf(STREAM, "\t.align\t%d\n", POWER); \
  11461. + } \
  11462. + while (0)
  11463. +
  11464. +/*
  11465. +Like ASM_OUTPUT_ALIGN, except that the "nop" instruction is used for padding, if
  11466. +necessary.
  11467. +*/
  11468. +#define ASM_OUTPUT_ALIGN_WITH_NOP(STREAM, POWER) \
  11469. + fprintf(STREAM, "\t.balignw\t%d, 0xd703\n", (1 << POWER))
  11470. +
  11471. +
  11472. +
  11473. +/******************************************************************************
  11474. + * Controlling Debugging Information Format
  11475. + *****************************************************************************/
  11476. +
  11477. +/* How to renumber registers for dbx and gdb. */
  11478. +#define DBX_REGISTER_NUMBER(REGNO) ASM_REGNUM (REGNO)
  11479. +
  11480. +/* The DWARF 2 CFA column which tracks the return address. */
  11481. +#define DWARF_FRAME_RETURN_COLUMN DWARF_FRAME_REGNUM(LR_REGNUM)
  11482. +
  11483. +/*
  11484. +Define this macro if GCC should produce dwarf version 2 format
  11485. +debugging output in response to the -g option.
  11486. +
  11487. +To support optional call frame debugging information, you must also
  11488. +define INCOMING_RETURN_ADDR_RTX and either set
  11489. +RTX_FRAME_RELATED_P on the prologue insns if you use RTL for the
  11490. +prologue, or call dwarf2out_def_cfa and dwarf2out_reg_save
  11491. +as appropriate from TARGET_ASM_FUNCTION_PROLOGUE if you don't.
  11492. +*/
  11493. +#define DWARF2_DEBUGGING_INFO 1
  11494. +
  11495. +
  11496. +#define DWARF2_ASM_LINE_DEBUG_INFO 1
  11497. +#define DWARF2_FRAME_INFO 1
  11498. +
  11499. +
  11500. +/******************************************************************************
  11501. + * Miscellaneous Parameters
  11502. + *****************************************************************************/
  11503. +
  11504. +/* ToDo: a lot */
  11505. +
  11506. +/*
  11507. +An alias for a machine mode name. This is the machine mode that
  11508. +elements of a jump-table should have.
  11509. +*/
  11510. +#define CASE_VECTOR_MODE SImode
  11511. +
  11512. +/*
  11513. +Define this macro to be a C expression to indicate when jump-tables
  11514. +should contain relative addresses. If jump-tables never contain
  11515. +relative addresses, then you need not define this macro.
  11516. +*/
  11517. +#define CASE_VECTOR_PC_RELATIVE 0
  11518. +
  11519. +/* Increase the threshold for using table jumps on the UC arch. */
  11520. +#define CASE_VALUES_THRESHOLD (TARGET_BRANCH_PRED ? 4 : 7)
  11521. +
  11522. +/*
  11523. +The maximum number of bytes that a single instruction can move quickly
  11524. +between memory and registers or between two memory locations.
  11525. +*/
  11526. +#define MOVE_MAX (2*UNITS_PER_WORD)
  11527. +
  11528. +
  11529. +/* A C expression that is nonzero if on this machine the number of bits actually used
  11530. + for the count of a shift operation is equal to the number of bits needed to represent
  11531. + the size of the object being shifted. When this macro is nonzero, the compiler will
  11532. + assume that it is safe to omit a sign-extend, zero-extend, and certain bitwise 'and'
  11533. + instructions that truncates the count of a shift operation. On machines that have
  11534. + instructions that act on bit-fields at variable positions, which may include 'bit test'
  11535. +
  11536. + instructions, a nonzero SHIFT_COUNT_TRUNCATED also enables deletion of truncations
  11537. + of the values that serve as arguments to bit-field instructions.
  11538. + If both types of instructions truncate the count (for shifts) and position (for bit-field
  11539. + operations), or if no variable-position bit-field instructions exist, you should define
  11540. + this macro.
  11541. + However, on some machines, such as the 80386 and the 680x0, truncation only applies
  11542. + to shift operations and not the (real or pretended) bit-field operations. Define SHIFT_
  11543. + COUNT_TRUNCATED to be zero on such machines. Instead, add patterns to the 'md' file
  11544. + that include the implied truncation of the shift instructions.
  11545. + You need not define this macro if it would always have the value of zero. */
  11546. +#define SHIFT_COUNT_TRUNCATED 1
  11547. +
  11548. +/*
  11549. +A C expression which is nonzero if on this machine it is safe to
  11550. +convert an integer of INPREC bits to one of OUTPREC
  11551. +bits (where OUTPREC is smaller than INPREC) by merely
  11552. +operating on it as if it had only OUTPREC bits.
  11553. +
  11554. +On many machines, this expression can be 1.
  11555. +
  11556. +When TRULY_NOOP_TRUNCATION returns 1 for a pair of sizes for
  11557. +modes for which MODES_TIEABLE_P is 0, suboptimal code can result.
  11558. +If this is the case, making TRULY_NOOP_TRUNCATION return 0 in
  11559. +such cases may improve things.
  11560. +*/
  11561. +#define TRULY_NOOP_TRUNCATION(OUTPREC, INPREC) 1
  11562. +
  11563. +/*
  11564. +An alias for the machine mode for pointers. On most machines, define
  11565. +this to be the integer mode corresponding to the width of a hardware
  11566. +pointer; SImode on 32-bit machine or DImode on 64-bit machines.
  11567. +On some machines you must define this to be one of the partial integer
  11568. +modes, such as PSImode.
  11569. +
  11570. +The width of Pmode must be at least as large as the value of
  11571. +POINTER_SIZE. If it is not equal, you must define the macro
  11572. +POINTERS_EXTEND_UNSIGNED to specify how pointers are extended
  11573. +to Pmode.
  11574. +*/
  11575. +#define Pmode SImode
  11576. +
  11577. +/*
  11578. +An alias for the machine mode used for memory references to functions
  11579. +being called, in call RTL expressions. On most machines this
  11580. +should be QImode.
  11581. +*/
  11582. +#define FUNCTION_MODE SImode
  11583. +
  11584. +
  11585. +#define REG_S_P(x) \
  11586. + (REG_P (x) || (GET_CODE (x) == SUBREG && REG_P (XEXP (x, 0))))
  11587. +
  11588. +
  11589. +/* If defined, modifies the length assigned to instruction INSN as a
  11590. + function of the context in which it is used. LENGTH is an lvalue
  11591. + that contains the initially computed length of the insn and should
  11592. + be updated with the correct length of the insn. */
  11593. +#define ADJUST_INSN_LENGTH(INSN, LENGTH) \
  11594. + ((LENGTH) = avr32_adjust_insn_length ((INSN), (LENGTH)))
  11595. +
  11596. +
  11597. +#define CLZ_DEFINED_VALUE_AT_ZERO(mode, value) \
  11598. + (value = 32, (mode == SImode))
  11599. +
  11600. +#define CTZ_DEFINED_VALUE_AT_ZERO(mode, value) \
  11601. + (value = 32, (mode == SImode))
  11602. +
  11603. +#define UNITS_PER_SIMD_WORD(mode) UNITS_PER_WORD
  11604. +
  11605. +#define STORE_FLAG_VALUE 1
  11606. +
  11607. +
  11608. +/* IF-conversion macros. */
  11609. +#define IFCVT_MODIFY_INSN( CE_INFO, PATTERN, INSN ) \
  11610. + { \
  11611. + (PATTERN) = avr32_ifcvt_modify_insn (CE_INFO, PATTERN, INSN, &num_true_changes); \
  11612. + }
  11613. +
  11614. +#define IFCVT_EXTRA_FIELDS \
  11615. + int num_cond_clobber_insns; \
  11616. + int num_extra_move_insns; \
  11617. + rtx extra_move_insns[MAX_CONDITIONAL_EXECUTE]; \
  11618. + rtx moved_insns[MAX_CONDITIONAL_EXECUTE];
  11619. +
  11620. +#define IFCVT_INIT_EXTRA_FIELDS( CE_INFO ) \
  11621. + { \
  11622. + (CE_INFO)->num_cond_clobber_insns = 0; \
  11623. + (CE_INFO)->num_extra_move_insns = 0; \
  11624. + }
  11625. +
  11626. +
  11627. +#define IFCVT_MODIFY_CANCEL( CE_INFO ) avr32_ifcvt_modify_cancel (CE_INFO, &num_true_changes)
  11628. +
  11629. +#define IFCVT_ALLOW_MODIFY_TEST_IN_INSN 1
  11630. +#define IFCVT_COND_EXEC_BEFORE_RELOAD (TARGET_COND_EXEC_BEFORE_RELOAD)
  11631. +
  11632. +enum avr32_builtins
  11633. +{
  11634. + AVR32_BUILTIN_MTSR,
  11635. + AVR32_BUILTIN_MFSR,
  11636. + AVR32_BUILTIN_MTDR,
  11637. + AVR32_BUILTIN_MFDR,
  11638. + AVR32_BUILTIN_CACHE,
  11639. + AVR32_BUILTIN_SYNC,
  11640. + AVR32_BUILTIN_SSRF,
  11641. + AVR32_BUILTIN_CSRF,
  11642. + AVR32_BUILTIN_TLBR,
  11643. + AVR32_BUILTIN_TLBS,
  11644. + AVR32_BUILTIN_TLBW,
  11645. + AVR32_BUILTIN_BREAKPOINT,
  11646. + AVR32_BUILTIN_XCHG,
  11647. + AVR32_BUILTIN_LDXI,
  11648. + AVR32_BUILTIN_BSWAP16,
  11649. + AVR32_BUILTIN_BSWAP32,
  11650. + AVR32_BUILTIN_COP,
  11651. + AVR32_BUILTIN_MVCR_W,
  11652. + AVR32_BUILTIN_MVRC_W,
  11653. + AVR32_BUILTIN_MVCR_D,
  11654. + AVR32_BUILTIN_MVRC_D,
  11655. + AVR32_BUILTIN_MULSATHH_H,
  11656. + AVR32_BUILTIN_MULSATHH_W,
  11657. + AVR32_BUILTIN_MULSATRNDHH_H,
  11658. + AVR32_BUILTIN_MULSATRNDWH_W,
  11659. + AVR32_BUILTIN_MULSATWH_W,
  11660. + AVR32_BUILTIN_MACSATHH_W,
  11661. + AVR32_BUILTIN_SATADD_H,
  11662. + AVR32_BUILTIN_SATSUB_H,
  11663. + AVR32_BUILTIN_SATADD_W,
  11664. + AVR32_BUILTIN_SATSUB_W,
  11665. + AVR32_BUILTIN_MULWH_D,
  11666. + AVR32_BUILTIN_MULNWH_D,
  11667. + AVR32_BUILTIN_MACWH_D,
  11668. + AVR32_BUILTIN_MACHH_D,
  11669. + AVR32_BUILTIN_MUSFR,
  11670. + AVR32_BUILTIN_MUSTR,
  11671. + AVR32_BUILTIN_SATS,
  11672. + AVR32_BUILTIN_SATU,
  11673. + AVR32_BUILTIN_SATRNDS,
  11674. + AVR32_BUILTIN_SATRNDU,
  11675. + AVR32_BUILTIN_MEMS,
  11676. + AVR32_BUILTIN_MEMC,
  11677. + AVR32_BUILTIN_MEMT,
  11678. + AVR32_BUILTIN_SLEEP,
  11679. + AVR32_BUILTIN_DELAY_CYCLES
  11680. +};
  11681. +
  11682. +
  11683. +#define FLOAT_LIB_COMPARE_RETURNS_BOOL(MODE, COMPARISON) \
  11684. + ((MODE == SFmode) || (MODE == DFmode))
  11685. +
  11686. +#define RENAME_LIBRARY_SET ".set"
  11687. +
  11688. +/* Make ABI_NAME an alias for __GCC_NAME. */
  11689. +#define RENAME_LIBRARY(GCC_NAME, ABI_NAME) \
  11690. + __asm__ (".globl\t__avr32_" #ABI_NAME "\n" \
  11691. + ".set\t__avr32_" #ABI_NAME \
  11692. + ", __" #GCC_NAME "\n");
  11693. +
  11694. +/* Give libgcc functions avr32 ABI name. */
  11695. +#ifdef L_muldi3
  11696. +#define DECLARE_LIBRARY_RENAMES RENAME_LIBRARY (muldi3, mul64)
  11697. +#endif
  11698. +#ifdef L_divdi3
  11699. +#define DECLARE_LIBRARY_RENAMES RENAME_LIBRARY (divdi3, sdiv64)
  11700. +#endif
  11701. +#ifdef L_udivdi3
  11702. +#define DECLARE_LIBRARY_RENAMES RENAME_LIBRARY (udivdi3, udiv64)
  11703. +#endif
  11704. +#ifdef L_moddi3
  11705. +#define DECLARE_LIBRARY_RENAMES RENAME_LIBRARY (moddi3, smod64)
  11706. +#endif
  11707. +#ifdef L_umoddi3
  11708. +#define DECLARE_LIBRARY_RENAMES RENAME_LIBRARY (umoddi3, umod64)
  11709. +#endif
  11710. +#ifdef L_ashldi3
  11711. +#define DECLARE_LIBRARY_RENAMES RENAME_LIBRARY (ashldi3, lsl64)
  11712. +#endif
  11713. +#ifdef L_lshrdi3
  11714. +#define DECLARE_LIBRARY_RENAMES RENAME_LIBRARY (lshrdi3, lsr64)
  11715. +#endif
  11716. +#ifdef L_ashrdi3
  11717. +#define DECLARE_LIBRARY_RENAMES RENAME_LIBRARY (ashrdi3, asr64)
  11718. +#endif
  11719. +
  11720. +#ifdef L_fixsfdi
  11721. +#define DECLARE_LIBRARY_RENAMES RENAME_LIBRARY (fixsfdi, f32_to_s64)
  11722. +#endif
  11723. +#ifdef L_fixunssfdi
  11724. +#define DECLARE_LIBRARY_RENAMES RENAME_LIBRARY (fixunssfdi, f32_to_u64)
  11725. +#endif
  11726. +#ifdef L_floatdidf
  11727. +#define DECLARE_LIBRARY_RENAMES RENAME_LIBRARY (floatdidf, s64_to_f64)
  11728. +#endif
  11729. +#ifdef L_floatdisf
  11730. +#define DECLARE_LIBRARY_RENAMES RENAME_LIBRARY (floatdisf, s64_to_f32)
  11731. +#endif
  11732. +
  11733. +#endif
  11734. diff -Nur gcc-4.4.6.orig/gcc/config/avr32/avr32.md gcc-4.4.6/gcc/config/avr32/avr32.md
  11735. --- gcc-4.4.6.orig/gcc/config/avr32/avr32.md 1970-01-01 01:00:00.000000000 +0100
  11736. +++ gcc-4.4.6/gcc/config/avr32/avr32.md 2011-10-22 19:23:08.524581303 +0200
  11737. @@ -0,0 +1,5198 @@
  11738. +;; AVR32 machine description file.
  11739. +;; Copyright 2003,2004,2005,2006,2007,2008,2009 Atmel Corporation.
  11740. +;;
  11741. +;; This file is part of GCC.
  11742. +;;
  11743. +;; This program is free software; you can redistribute it and/or modify
  11744. +;; it under the terms of the GNU General Public License as published by
  11745. +;; the Free Software Foundation; either version 2 of the License, or
  11746. +;; (at your option) any later version.
  11747. +;;
  11748. +;; This program is distributed in the hope that it will be useful,
  11749. +;; but WITHOUT ANY WARRANTY; without even the implied warranty of
  11750. +;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  11751. +;; GNU General Public License for more details.
  11752. +;;
  11753. +;; You should have received a copy of the GNU General Public License
  11754. +;; along with this program; if not, write to the Free Software
  11755. +;; Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
  11756. +
  11757. +;; -*- Mode: Scheme -*-
  11758. +
  11759. +(define_attr "type" "alu,alu2,alu_sat,mulhh,mulwh,mulww_w,mulww_d,div,machh_w,macww_w,macww_d,branch,call,load,load_rm,store,load2,load4,store2,store4,fmul,fcmps,fcmpd,fcast,fmv,fmvcpu,fldd,fstd,flds,fsts,fstm"
  11760. + (const_string "alu"))
  11761. +
  11762. +
  11763. +(define_attr "cc" "none,set_vncz,set_ncz,set_cz,set_z,set_z_if_not_v2,bld,compare,cmp_cond_insn,clobber,call_set,fpcompare,from_fpcc"
  11764. + (const_string "none"))
  11765. +
  11766. +
  11767. +; NB! Keep this in sync with enum architecture_type in avr32.h
  11768. +(define_attr "pipeline" "ap,ucr1,ucr2,ucr2nomul,ucr3,ucr3fp"
  11769. + (const (symbol_ref "avr32_arch->arch_type")))
  11770. +
  11771. +; Insn length in bytes
  11772. +(define_attr "length" ""
  11773. + (const_int 4))
  11774. +
  11775. +; Signal if an insn is predicable and hence can be conditionally executed.
  11776. +(define_attr "predicable" "no,yes" (const_string "no"))
  11777. +
  11778. +;; Uses of UNSPEC in this file:
  11779. +(define_constants
  11780. + [(UNSPEC_PUSHM 0)
  11781. + (UNSPEC_POPM 1)
  11782. + (UNSPEC_UDIVMODSI4_INTERNAL 2)
  11783. + (UNSPEC_DIVMODSI4_INTERNAL 3)
  11784. + (UNSPEC_STM 4)
  11785. + (UNSPEC_LDM 5)
  11786. + (UNSPEC_MOVSICC 6)
  11787. + (UNSPEC_ADDSICC 7)
  11788. + (UNSPEC_COND_MI 8)
  11789. + (UNSPEC_COND_PL 9)
  11790. + (UNSPEC_PIC_SYM 10)
  11791. + (UNSPEC_PIC_BASE 11)
  11792. + (UNSPEC_STORE_MULTIPLE 12)
  11793. + (UNSPEC_STMFP 13)
  11794. + (UNSPEC_FRCPA 14)
  11795. + (UNSPEC_REG_TO_CC 15)
  11796. + (UNSPEC_FORCE_MINIPOOL 16)
  11797. + (UNSPEC_SATS 17)
  11798. + (UNSPEC_SATU 18)
  11799. + (UNSPEC_SATRNDS 19)
  11800. + (UNSPEC_SATRNDU 20)
  11801. + ])
  11802. +
  11803. +(define_constants
  11804. + [(VUNSPEC_EPILOGUE 0)
  11805. + (VUNSPEC_CACHE 1)
  11806. + (VUNSPEC_MTSR 2)
  11807. + (VUNSPEC_MFSR 3)
  11808. + (VUNSPEC_BLOCKAGE 4)
  11809. + (VUNSPEC_SYNC 5)
  11810. + (VUNSPEC_TLBR 6)
  11811. + (VUNSPEC_TLBW 7)
  11812. + (VUNSPEC_TLBS 8)
  11813. + (VUNSPEC_BREAKPOINT 9)
  11814. + (VUNSPEC_MTDR 10)
  11815. + (VUNSPEC_MFDR 11)
  11816. + (VUNSPEC_MVCR 12)
  11817. + (VUNSPEC_MVRC 13)
  11818. + (VUNSPEC_COP 14)
  11819. + (VUNSPEC_ALIGN 15)
  11820. + (VUNSPEC_POOL_START 16)
  11821. + (VUNSPEC_POOL_END 17)
  11822. + (VUNSPEC_POOL_4 18)
  11823. + (VUNSPEC_POOL_8 19)
  11824. + (VUNSPEC_POOL_16 20)
  11825. + (VUNSPEC_MUSFR 21)
  11826. + (VUNSPEC_MUSTR 22)
  11827. + (VUNSPEC_SYNC_CMPXCHG 23)
  11828. + (VUNSPEC_SYNC_SET_LOCK_AND_LOAD 24)
  11829. + (VUNSPEC_SYNC_STORE_IF_LOCK 25)
  11830. + (VUNSPEC_EH_RETURN 26)
  11831. + (VUNSPEC_FRS 27)
  11832. + (VUNSPEC_CSRF 28)
  11833. + (VUNSPEC_SSRF 29)
  11834. + (VUNSPEC_SLEEP 30)
  11835. + (VUNSPEC_DELAY_CYCLES 31)
  11836. + (VUNSPEC_DELAY_CYCLES_1 32)
  11837. + (VUNSPEC_DELAY_CYCLES_2 33)
  11838. + (VUNSPEC_NOP 34)
  11839. + (VUNSPEC_NOP3 35)
  11840. + ])
  11841. +
  11842. +(define_constants
  11843. + [
  11844. + ;; R7 = 15-7 = 8
  11845. + (FP_REGNUM 8)
  11846. + ;; Return Register = R12 = 15 - 12 = 3
  11847. + (RETVAL_REGNUM 3)
  11848. + ;; SP = R13 = 15 - 13 = 2
  11849. + (SP_REGNUM 2)
  11850. + ;; LR = R14 = 15 - 14 = 1
  11851. + (LR_REGNUM 1)
  11852. + ;; PC = R15 = 15 - 15 = 0
  11853. + (PC_REGNUM 0)
  11854. + ;; FPSR = GENERAL_REGS + 1 = 17
  11855. + (FPCC_REGNUM 17)
  11856. + ])
  11857. +
  11858. +
  11859. +
  11860. +
  11861. +;;******************************************************************************
  11862. +;; Macros
  11863. +;;******************************************************************************
  11864. +
  11865. +;; Integer Modes for basic alu insns
  11866. +(define_mode_iterator INTM [SI HI QI])
  11867. +(define_mode_attr alu_cc_attr [(SI "set_vncz") (HI "clobber") (QI "clobber")])
  11868. +
  11869. +;; Move word modes
  11870. +(define_mode_iterator MOVM [SI V2HI V4QI])
  11871. +
  11872. +;; For mov/addcc insns
  11873. +(define_mode_iterator ADDCC [SI HI QI])
  11874. +(define_mode_iterator MOVCC [SF SI HI QI])
  11875. +(define_mode_iterator CMP [DI SI HI QI])
  11876. +(define_mode_attr store_postfix [(SF ".w") (SI ".w") (HI ".h") (QI ".b")])
  11877. +(define_mode_attr load_postfix [(SF ".w") (SI ".w") (HI ".sh") (QI ".ub")])
  11878. +(define_mode_attr load_postfix_s [(SI ".w") (HI ".sh") (QI ".sb")])
  11879. +(define_mode_attr load_postfix_u [(SI ".w") (HI ".uh") (QI ".ub")])
  11880. +(define_mode_attr pred_mem_constraint [(SF "RKu11") (SI "RKu11") (HI "RKu10") (QI "RKu09")])
  11881. +(define_mode_attr cmp_constraint [(DI "rKu20") (SI "rKs21") (HI "r") (QI "r")])
  11882. +(define_mode_attr cmp_predicate [(DI "register_immediate_operand")
  11883. + (SI "register_const_int_operand")
  11884. + (HI "register_operand")
  11885. + (QI "register_operand")])
  11886. +(define_mode_attr cmp_length [(DI "6")
  11887. + (SI "4")
  11888. + (HI "4")
  11889. + (QI "4")])
  11890. +
  11891. +;; For all conditional insns
  11892. +(define_code_iterator any_cond_b [ge lt geu ltu])
  11893. +(define_code_iterator any_cond [gt ge lt le gtu geu ltu leu])
  11894. +(define_code_iterator any_cond4 [gt le gtu leu])
  11895. +(define_code_attr cond [(eq "eq") (ne "ne") (gt "gt") (ge "ge") (lt "lt") (le "le")
  11896. + (gtu "hi") (geu "hs") (ltu "lo") (leu "ls")])
  11897. +(define_code_attr invcond [(eq "ne") (ne "eq") (gt "le") (ge "lt") (lt "ge") (le "gt")
  11898. + (gtu "ls") (geu "lo") (ltu "hs") (leu "hi")])
  11899. +
  11900. +;; For logical operations
  11901. +(define_code_iterator logical [and ior xor])
  11902. +(define_code_attr logical_insn [(and "and") (ior "or") (xor "eor")])
  11903. +
  11904. +;; Predicable operations with three register operands
  11905. +(define_code_iterator predicable_op3 [and ior xor plus minus])
  11906. +(define_code_attr predicable_insn3 [(and "and") (ior "or") (xor "eor") (plus "add") (minus "sub")])
  11907. +(define_code_attr predicable_commutative3 [(and "%") (ior "%") (xor "%") (plus "%") (minus "")])
  11908. +
  11909. +;; Load the predicates
  11910. +(include "predicates.md")
  11911. +
  11912. +
  11913. +;;******************************************************************************
  11914. +;; Automaton pipeline description for avr32
  11915. +;;******************************************************************************
  11916. +
  11917. +(define_automaton "avr32_ap")
  11918. +
  11919. +
  11920. +(define_cpu_unit "is" "avr32_ap")
  11921. +(define_cpu_unit "a1,m1,da" "avr32_ap")
  11922. +(define_cpu_unit "a2,m2,d" "avr32_ap")
  11923. +
  11924. +;;Alu instructions
  11925. +(define_insn_reservation "alu_op" 1
  11926. + (and (eq_attr "pipeline" "ap")
  11927. + (eq_attr "type" "alu"))
  11928. + "is,a1,a2")
  11929. +
  11930. +(define_insn_reservation "alu2_op" 2
  11931. + (and (eq_attr "pipeline" "ap")
  11932. + (eq_attr "type" "alu2"))
  11933. + "is,is+a1,a1+a2,a2")
  11934. +
  11935. +(define_insn_reservation "alu_sat_op" 2
  11936. + (and (eq_attr "pipeline" "ap")
  11937. + (eq_attr "type" "alu_sat"))
  11938. + "is,a1,a2")
  11939. +
  11940. +
  11941. +;;Mul instructions
  11942. +(define_insn_reservation "mulhh_op" 2
  11943. + (and (eq_attr "pipeline" "ap")
  11944. + (eq_attr "type" "mulhh,mulwh"))
  11945. + "is,m1,m2")
  11946. +
  11947. +(define_insn_reservation "mulww_w_op" 3
  11948. + (and (eq_attr "pipeline" "ap")
  11949. + (eq_attr "type" "mulww_w"))
  11950. + "is,m1,m1+m2,m2")
  11951. +
  11952. +(define_insn_reservation "mulww_d_op" 5
  11953. + (and (eq_attr "pipeline" "ap")
  11954. + (eq_attr "type" "mulww_d"))
  11955. + "is,m1,m1+m2,m1+m2,m2,m2")
  11956. +
  11957. +(define_insn_reservation "div_op" 33
  11958. + (and (eq_attr "pipeline" "ap")
  11959. + (eq_attr "type" "div"))
  11960. + "is,m1,m1*31 + m2*31,m2")
  11961. +
  11962. +(define_insn_reservation "machh_w_op" 3
  11963. + (and (eq_attr "pipeline" "ap")
  11964. + (eq_attr "type" "machh_w"))
  11965. + "is*2,m1,m2")
  11966. +
  11967. +
  11968. +(define_insn_reservation "macww_w_op" 4
  11969. + (and (eq_attr "pipeline" "ap")
  11970. + (eq_attr "type" "macww_w"))
  11971. + "is*2,m1,m1,m2")
  11972. +
  11973. +
  11974. +(define_insn_reservation "macww_d_op" 6
  11975. + (and (eq_attr "pipeline" "ap")
  11976. + (eq_attr "type" "macww_d"))
  11977. + "is*2,m1,m1+m2,m1+m2,m2")
  11978. +
  11979. +;;Bypasses for Mac instructions, because of accumulator cache.
  11980. +;;Set latency as low as possible in order to let the compiler let
  11981. +;;mul -> mac and mac -> mac combinations which use the same
  11982. +;;accumulator cache be placed close together to avoid any
  11983. +;;instructions which can ruin the accumulator cache come inbetween.
  11984. +(define_bypass 4 "machh_w_op" "alu_op,alu2_op,alu_sat_op,load_op" "avr32_mul_waw_bypass")
  11985. +(define_bypass 5 "macww_w_op" "alu_op,alu2_op,alu_sat_op,load_op" "avr32_mul_waw_bypass")
  11986. +(define_bypass 7 "macww_d_op" "alu_op,alu2_op,alu_sat_op,load_op" "avr32_mul_waw_bypass")
  11987. +
  11988. +(define_bypass 3 "mulhh_op" "alu_op,alu2_op,alu_sat_op,load_op" "avr32_mul_waw_bypass")
  11989. +(define_bypass 4 "mulww_w_op" "alu_op,alu2_op,alu_sat_op,load_op" "avr32_mul_waw_bypass")
  11990. +(define_bypass 6 "mulww_d_op" "alu_op,alu2_op,alu_sat_op,load_op" "avr32_mul_waw_bypass")
  11991. +
  11992. +
  11993. +;;Bypasses for all mul/mac instructions followed by an instruction
  11994. +;;which reads the output AND writes the result to the same register.
  11995. +;;This will generate an Write After Write hazard which gives an
  11996. +;;extra cycle before the result is ready.
  11997. +(define_bypass 0 "machh_w_op" "machh_w_op" "avr32_valid_macmac_bypass")
  11998. +(define_bypass 0 "macww_w_op" "macww_w_op" "avr32_valid_macmac_bypass")
  11999. +(define_bypass 0 "macww_d_op" "macww_d_op" "avr32_valid_macmac_bypass")
  12000. +
  12001. +(define_bypass 0 "mulhh_op" "machh_w_op" "avr32_valid_mulmac_bypass")
  12002. +(define_bypass 0 "mulww_w_op" "macww_w_op" "avr32_valid_mulmac_bypass")
  12003. +(define_bypass 0 "mulww_d_op" "macww_d_op" "avr32_valid_mulmac_bypass")
  12004. +
  12005. +;;Branch and call instructions
  12006. +;;We assume that all branches and rcalls are predicted correctly :-)
  12007. +;;while calls use a lot of cycles.
  12008. +(define_insn_reservation "branch_op" 0
  12009. + (and (eq_attr "pipeline" "ap")
  12010. + (eq_attr "type" "branch"))
  12011. + "nothing")
  12012. +
  12013. +(define_insn_reservation "call_op" 10
  12014. + (and (eq_attr "pipeline" "ap")
  12015. + (eq_attr "type" "call"))
  12016. + "nothing")
  12017. +
  12018. +
  12019. +;;Load store instructions
  12020. +(define_insn_reservation "load_op" 2
  12021. + (and (eq_attr "pipeline" "ap")
  12022. + (eq_attr "type" "load"))
  12023. + "is,da,d")
  12024. +
  12025. +(define_insn_reservation "load_rm_op" 3
  12026. + (and (eq_attr "pipeline" "ap")
  12027. + (eq_attr "type" "load_rm"))
  12028. + "is,da,d")
  12029. +
  12030. +
  12031. +(define_insn_reservation "store_op" 0
  12032. + (and (eq_attr "pipeline" "ap")
  12033. + (eq_attr "type" "store"))
  12034. + "is,da,d")
  12035. +
  12036. +
  12037. +(define_insn_reservation "load_double_op" 3
  12038. + (and (eq_attr "pipeline" "ap")
  12039. + (eq_attr "type" "load2"))
  12040. + "is,da,da+d,d")
  12041. +
  12042. +(define_insn_reservation "load_quad_op" 4
  12043. + (and (eq_attr "pipeline" "ap")
  12044. + (eq_attr "type" "load4"))
  12045. + "is,da,da+d,da+d,d")
  12046. +
  12047. +(define_insn_reservation "store_double_op" 0
  12048. + (and (eq_attr "pipeline" "ap")
  12049. + (eq_attr "type" "store2"))
  12050. + "is,da,da+d,d")
  12051. +
  12052. +
  12053. +(define_insn_reservation "store_quad_op" 0
  12054. + (and (eq_attr "pipeline" "ap")
  12055. + (eq_attr "type" "store4"))
  12056. + "is,da,da+d,da+d,d")
  12057. +
  12058. +;;For store the operand to write to memory is read in d and
  12059. +;;the real latency between any instruction and a store is therefore
  12060. +;;one less than for the instructions which read the operands in the first
  12061. +;;execution stage
  12062. +(define_bypass 2 "load_double_op" "store_double_op" "avr32_store_bypass")
  12063. +(define_bypass 3 "load_quad_op" "store_quad_op" "avr32_store_bypass")
  12064. +(define_bypass 1 "load_op" "store_op" "avr32_store_bypass")
  12065. +(define_bypass 2 "load_rm_op" "store_op" "avr32_store_bypass")
  12066. +(define_bypass 1 "alu_sat_op" "store_op" "avr32_store_bypass")
  12067. +(define_bypass 1 "alu2_op" "store_op" "avr32_store_bypass")
  12068. +(define_bypass 1 "mulhh_op" "store_op" "avr32_store_bypass")
  12069. +(define_bypass 2 "mulww_w_op" "store_op" "avr32_store_bypass")
  12070. +(define_bypass 4 "mulww_d_op" "store_op" "avr32_store_bypass" )
  12071. +(define_bypass 2 "machh_w_op" "store_op" "avr32_store_bypass")
  12072. +(define_bypass 3 "macww_w_op" "store_op" "avr32_store_bypass")
  12073. +(define_bypass 5 "macww_d_op" "store_op" "avr32_store_bypass")
  12074. +
  12075. +
  12076. +; Bypass for load double operation. If only the first loaded word is needed
  12077. +; then the latency is 2
  12078. +(define_bypass 2 "load_double_op"
  12079. + "load_op,load_rm_op,alu_sat_op, alu2_op, alu_op, mulhh_op, mulww_w_op,
  12080. + mulww_d_op, machh_w_op, macww_w_op, macww_d_op"
  12081. + "avr32_valid_load_double_bypass")
  12082. +
  12083. +; Bypass for load quad operation. If only the first or second loaded word is needed
  12084. +; we set the latency to 2
  12085. +(define_bypass 2 "load_quad_op"
  12086. + "load_op,load_rm_op,alu_sat_op, alu2_op, alu_op, mulhh_op, mulww_w_op,
  12087. + mulww_d_op, machh_w_op, macww_w_op, macww_d_op"
  12088. + "avr32_valid_load_quad_bypass")
  12089. +
  12090. +
  12091. +;;******************************************************************************
  12092. +;; End of Automaton pipeline description for avr32
  12093. +;;******************************************************************************
  12094. +
  12095. +(define_cond_exec
  12096. + [(match_operator 0 "avr32_comparison_operator"
  12097. + [(match_operand:CMP 1 "register_operand" "r")
  12098. + (match_operand:CMP 2 "<CMP:cmp_predicate>" "<CMP:cmp_constraint>")])]
  12099. + "TARGET_V2_INSNS"
  12100. + "%!"
  12101. +)
  12102. +
  12103. +(define_cond_exec
  12104. + [(match_operator 0 "avr32_comparison_operator"
  12105. + [(and:SI (match_operand:SI 1 "register_operand" "r")
  12106. + (match_operand:SI 2 "one_bit_set_operand" "i"))
  12107. + (const_int 0)])]
  12108. + "TARGET_V2_INSNS"
  12109. + "%!"
  12110. + )
  12111. +
  12112. +;;=============================================================================
  12113. +;; move
  12114. +;;-----------------------------------------------------------------------------
  12115. +
  12116. +
  12117. +;;== char - 8 bits ============================================================
  12118. +(define_expand "movqi"
  12119. + [(set (match_operand:QI 0 "nonimmediate_operand" "")
  12120. + (match_operand:QI 1 "general_operand" ""))]
  12121. + ""
  12122. + {
  12123. + if ( can_create_pseudo_p () ){
  12124. + if (GET_CODE (operands[1]) == MEM && optimize){
  12125. + rtx reg = gen_reg_rtx (SImode);
  12126. +
  12127. + emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
  12128. + operands[1] = gen_lowpart (QImode, reg);
  12129. + }
  12130. +
  12131. + /* One of the ops has to be in a register. */
  12132. + if (GET_CODE (operands[0]) == MEM)
  12133. + operands[1] = force_reg (QImode, operands[1]);
  12134. + }
  12135. +
  12136. + })
  12137. +
  12138. +(define_insn "*movqi_internal"
  12139. + [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,m,r")
  12140. + (match_operand:QI 1 "general_operand" "rKs08,m,r,i"))]
  12141. + "register_operand (operands[0], QImode)
  12142. + || register_operand (operands[1], QImode)"
  12143. + "@
  12144. + mov\t%0, %1
  12145. + ld.ub\t%0, %1
  12146. + st.b\t%0, %1
  12147. + mov\t%0, %1"
  12148. + [(set_attr "length" "2,4,4,4")
  12149. + (set_attr "type" "alu,load_rm,store,alu")])
  12150. +
  12151. +
  12152. +
  12153. +;;== short - 16 bits ==========================================================
  12154. +(define_expand "movhi"
  12155. + [(set (match_operand:HI 0 "nonimmediate_operand" "")
  12156. + (match_operand:HI 1 "general_operand" ""))]
  12157. + ""
  12158. + {
  12159. + if ( can_create_pseudo_p () ){
  12160. + if (GET_CODE (operands[1]) == MEM && optimize){
  12161. + rtx reg = gen_reg_rtx (SImode);
  12162. +
  12163. + emit_insn (gen_extendhisi2 (reg, operands[1]));
  12164. + operands[1] = gen_lowpart (HImode, reg);
  12165. + }
  12166. +
  12167. + /* One of the ops has to be in a register. */
  12168. + if (GET_CODE (operands[0]) == MEM)
  12169. + operands[1] = force_reg (HImode, operands[1]);
  12170. + }
  12171. +
  12172. + })
  12173. +
  12174. +
  12175. +(define_insn "*movhi_internal"
  12176. + [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,m,r")
  12177. + (match_operand:HI 1 "general_operand" "rKs08,m,r,i"))]
  12178. + "register_operand (operands[0], HImode)
  12179. + || register_operand (operands[1], HImode)"
  12180. + "@
  12181. + mov\t%0, %1
  12182. + ld.sh\t%0, %1
  12183. + st.h\t%0, %1
  12184. + mov\t%0, %1"
  12185. + [(set_attr "length" "2,4,4,4")
  12186. + (set_attr "type" "alu,load_rm,store,alu")])
  12187. +
  12188. +
  12189. +;;== int - 32 bits ============================================================
  12190. +
  12191. +(define_expand "movmisalignsi"
  12192. + [(set (match_operand:SI 0 "nonimmediate_operand" "")
  12193. + (match_operand:SI 1 "nonimmediate_operand" ""))]
  12194. + "TARGET_UNALIGNED_WORD"
  12195. + {
  12196. + }
  12197. +)
  12198. +
  12199. +(define_expand "mov<mode>"
  12200. + [(set (match_operand:MOVM 0 "avr32_non_rmw_nonimmediate_operand" "")
  12201. + (match_operand:MOVM 1 "avr32_non_rmw_general_operand" ""))]
  12202. + ""
  12203. + {
  12204. +
  12205. + /* One of the ops has to be in a register. */
  12206. + if (GET_CODE (operands[0]) == MEM)
  12207. + operands[1] = force_reg (<MODE>mode, operands[1]);
  12208. +
  12209. + /* Check for out of range immediate constants as these may
  12210. + occur during reloading, since it seems like reload does
  12211. + not check if the immediate is legitimate. Don't know if
  12212. + this is a bug? */
  12213. + if ( reload_in_progress
  12214. + && avr32_imm_in_const_pool
  12215. + && GET_CODE(operands[1]) == CONST_INT
  12216. + && !avr32_const_ok_for_constraint_p(INTVAL(operands[1]), 'K', "Ks21") ){
  12217. + operands[1] = force_const_mem(SImode, operands[1]);
  12218. + }
  12219. + /* Check for RMW memory operands. They are not allowed for mov operations
  12220. + only the atomic memc/s/t operations */
  12221. + if ( !reload_in_progress
  12222. + && avr32_rmw_memory_operand (operands[0], <MODE>mode) ){
  12223. + operands[0] = copy_rtx (operands[0]);
  12224. + XEXP(operands[0], 0) = force_reg (<MODE>mode, XEXP(operands[0], 0));
  12225. + }
  12226. +
  12227. + if ( !reload_in_progress
  12228. + && avr32_rmw_memory_operand (operands[1], <MODE>mode) ){
  12229. + operands[1] = copy_rtx (operands[1]);
  12230. + XEXP(operands[1], 0) = force_reg (<MODE>mode, XEXP(operands[1], 0));
  12231. + }
  12232. + if ( (flag_pic || TARGET_HAS_ASM_ADDR_PSEUDOS)
  12233. + && !avr32_legitimate_pic_operand_p(operands[1]) )
  12234. + operands[1] = legitimize_pic_address (operands[1], <MODE>mode,
  12235. + (can_create_pseudo_p () ? 0: operands[0]));
  12236. + else if ( flag_pic && avr32_address_operand(operands[1], GET_MODE(operands[1])) )
  12237. + /* If we have an address operand then this function uses the pic register. */
  12238. + crtl->uses_pic_offset_table = 1;
  12239. + })
  12240. +
  12241. +
  12242. +(define_insn "mov<mode>_internal"
  12243. + [(set (match_operand:MOVM 0 "avr32_non_rmw_nonimmediate_operand" "=r, r, r,r,r,Q,r")
  12244. + (match_operand:MOVM 1 "avr32_non_rmw_general_operand" "rKs08,Ks21,J,n,Q,r,W"))]
  12245. + "(register_operand (operands[0], <MODE>mode)
  12246. + || register_operand (operands[1], <MODE>mode))
  12247. + && !avr32_rmw_memory_operand (operands[0], <MODE>mode)
  12248. + && !avr32_rmw_memory_operand (operands[1], <MODE>mode)"
  12249. + {
  12250. + switch (which_alternative) {
  12251. + case 0:
  12252. + case 1: return "mov\t%0, %1";
  12253. + case 2:
  12254. + if ( TARGET_V2_INSNS )
  12255. + return "movh\t%0, hi(%1)";
  12256. + /* Fallthrough */
  12257. + case 3: return "mov\t%0, lo(%1)\;orh\t%0,hi(%1)";
  12258. + case 4:
  12259. + if ( (REG_P(XEXP(operands[1], 0))
  12260. + && REGNO(XEXP(operands[1], 0)) == SP_REGNUM)
  12261. + || (GET_CODE(XEXP(operands[1], 0)) == PLUS
  12262. + && REGNO(XEXP(XEXP(operands[1], 0), 0)) == SP_REGNUM
  12263. + && GET_CODE(XEXP(XEXP(operands[1], 0), 1)) == CONST_INT
  12264. + && INTVAL(XEXP(XEXP(operands[1], 0), 1)) % 4 == 0
  12265. + && INTVAL(XEXP(XEXP(operands[1], 0), 1)) <= 0x1FC) )
  12266. + return "lddsp\t%0, %1";
  12267. + else if ( avr32_const_pool_ref_operand(operands[1], GET_MODE(operands[1])) )
  12268. + return "lddpc\t%0, %1";
  12269. + else
  12270. + return "ld.w\t%0, %1";
  12271. + case 5:
  12272. + if ( (REG_P(XEXP(operands[0], 0))
  12273. + && REGNO(XEXP(operands[0], 0)) == SP_REGNUM)
  12274. + || (GET_CODE(XEXP(operands[0], 0)) == PLUS
  12275. + && REGNO(XEXP(XEXP(operands[0], 0), 0)) == SP_REGNUM
  12276. + && GET_CODE(XEXP(XEXP(operands[0], 0), 1)) == CONST_INT
  12277. + && INTVAL(XEXP(XEXP(operands[0], 0), 1)) % 4 == 0
  12278. + && INTVAL(XEXP(XEXP(operands[0], 0), 1)) <= 0x1FC) )
  12279. + return "stdsp\t%0, %1";
  12280. + else
  12281. + return "st.w\t%0, %1";
  12282. + case 6:
  12283. + if ( TARGET_HAS_ASM_ADDR_PSEUDOS )
  12284. + return "lda.w\t%0, %1";
  12285. + else
  12286. + return "ld.w\t%0, r6[%1@got]";
  12287. + default:
  12288. + abort();
  12289. + }
  12290. + }
  12291. +
  12292. + [(set_attr "length" "2,4,4,8,4,4,8")
  12293. + (set_attr "type" "alu,alu,alu,alu2,load,store,load")
  12294. + (set_attr "cc" "none,none,set_z_if_not_v2,set_z,none,none,clobber")])
  12295. +
  12296. +
  12297. +(define_expand "reload_out_rmw_memory_operand"
  12298. + [(set (match_operand:SI 2 "register_operand" "=r")
  12299. + (match_operand:SI 0 "address_operand" ""))
  12300. + (set (mem:SI (match_dup 2))
  12301. + (match_operand:SI 1 "register_operand" ""))]
  12302. + ""
  12303. + {
  12304. + operands[0] = XEXP(operands[0], 0);
  12305. + }
  12306. +)
  12307. +
  12308. +(define_expand "reload_in_rmw_memory_operand"
  12309. + [(set (match_operand:SI 2 "register_operand" "=r")
  12310. + (match_operand:SI 1 "address_operand" ""))
  12311. + (set (match_operand:SI 0 "register_operand" "")
  12312. + (mem:SI (match_dup 2)))]
  12313. + ""
  12314. + {
  12315. + operands[1] = XEXP(operands[1], 0);
  12316. + }
  12317. +)
  12318. +
  12319. +
  12320. +;; These instructions are for loading constants which cannot be loaded
  12321. +;; directly from the constant pool because the offset is too large
  12322. +;; high and lo_sum are used even though for our case it should be
  12323. +;; low and high sum :-)
  12324. +(define_insn "mov_symbol_lo"
  12325. + [(set (match_operand:SI 0 "register_operand" "=r")
  12326. + (high:SI (match_operand:SI 1 "immediate_operand" "i" )))]
  12327. + ""
  12328. + "mov\t%0, lo(%1)"
  12329. + [(set_attr "type" "alu")
  12330. + (set_attr "length" "4")]
  12331. +)
  12332. +
  12333. +(define_insn "add_symbol_hi"
  12334. + [(set (match_operand:SI 0 "register_operand" "=r")
  12335. + (lo_sum:SI (match_dup 0)
  12336. + (match_operand:SI 1 "immediate_operand" "i" )))]
  12337. + ""
  12338. + "orh\t%0, hi(%1)"
  12339. + [(set_attr "type" "alu")
  12340. + (set_attr "length" "4")]
  12341. +)
  12342. +
  12343. +
  12344. +
  12345. +;; When generating pic, we need to load the symbol offset into a register.
  12346. +;; So that the optimizer does not confuse this with a normal symbol load
  12347. +;; we use an unspec. The offset will be loaded from a constant pool entry,
  12348. +;; since that is the only type of relocation we can use.
  12349. +(define_insn "pic_load_addr"
  12350. + [(set (match_operand:SI 0 "register_operand" "=r")
  12351. + (unspec:SI [(match_operand:SI 1 "" "")] UNSPEC_PIC_SYM))]
  12352. + "flag_pic && CONSTANT_POOL_ADDRESS_P(XEXP(operands[1], 0))"
  12353. + "lddpc\t%0, %1"
  12354. + [(set_attr "type" "load")
  12355. + (set_attr "length" "4")]
  12356. +)
  12357. +
  12358. +(define_insn "pic_compute_got_from_pc"
  12359. + [(set (match_operand:SI 0 "register_operand" "+r")
  12360. + (unspec:SI [(minus:SI (pc)
  12361. + (match_dup 0))] UNSPEC_PIC_BASE))
  12362. + (use (label_ref (match_operand 1 "" "")))]
  12363. + "flag_pic"
  12364. + {
  12365. + (*targetm.asm_out.internal_label) (asm_out_file, "L",
  12366. + CODE_LABEL_NUMBER (operands[1]));
  12367. + return \"rsub\t%0, pc\";
  12368. + }
  12369. + [(set_attr "cc" "clobber")
  12370. + (set_attr "length" "2")]
  12371. +)
  12372. +
  12373. +;;== long long int - 64 bits ==================================================
  12374. +
  12375. +(define_expand "movdi"
  12376. + [(set (match_operand:DI 0 "nonimmediate_operand" "")
  12377. + (match_operand:DI 1 "general_operand" ""))]
  12378. + ""
  12379. + {
  12380. +
  12381. + /* One of the ops has to be in a register. */
  12382. + if (GET_CODE (operands[0]) != REG)
  12383. + operands[1] = force_reg (DImode, operands[1]);
  12384. +
  12385. + })
  12386. +
  12387. +
  12388. +(define_insn_and_split "*movdi_internal"
  12389. + [(set (match_operand:DI 0 "nonimmediate_operand" "=r,r, r, r,r,r,m")
  12390. + (match_operand:DI 1 "general_operand" "r, Ks08,Ks21,G,n,m,r"))]
  12391. + "register_operand (operands[0], DImode)
  12392. + || register_operand (operands[1], DImode)"
  12393. + {
  12394. + switch (which_alternative ){
  12395. + case 0:
  12396. + case 1:
  12397. + case 2:
  12398. + case 3:
  12399. + case 4:
  12400. + return "#";
  12401. + case 5:
  12402. + if ( avr32_const_pool_ref_operand(operands[1], GET_MODE(operands[1])))
  12403. + return "ld.d\t%0, pc[%1 - .]";
  12404. + else
  12405. + return "ld.d\t%0, %1";
  12406. + case 6:
  12407. + return "st.d\t%0, %1";
  12408. + default:
  12409. + abort();
  12410. + }
  12411. + }
  12412. +;; Lets split all reg->reg or imm->reg transfers into two SImode transfers
  12413. + "reload_completed &&
  12414. + (REG_P (operands[0]) &&
  12415. + (REG_P (operands[1])
  12416. + || GET_CODE (operands[1]) == CONST_INT
  12417. + || GET_CODE (operands[1]) == CONST_DOUBLE))"
  12418. + [(set (match_dup 0) (match_dup 1))
  12419. + (set (match_dup 2) (match_dup 3))]
  12420. + {
  12421. + operands[2] = gen_highpart (SImode, operands[0]);
  12422. + operands[0] = gen_lowpart (SImode, operands[0]);
  12423. + if ( REG_P(operands[1]) ){
  12424. + operands[3] = gen_highpart(SImode, operands[1]);
  12425. + operands[1] = gen_lowpart(SImode, operands[1]);
  12426. + } else if ( GET_CODE(operands[1]) == CONST_DOUBLE
  12427. + || GET_CODE(operands[1]) == CONST_INT ){
  12428. + rtx split_const[2];
  12429. + avr32_split_const_expr (DImode, SImode, operands[1], split_const);
  12430. + operands[3] = split_const[1];
  12431. + operands[1] = split_const[0];
  12432. + } else {
  12433. + internal_error("Illegal operand[1] for movdi split!");
  12434. + }
  12435. + }
  12436. +
  12437. + [(set_attr "length" "*,*,*,*,*,4,4")
  12438. + (set_attr "type" "*,*,*,*,*,load2,store2")
  12439. + (set_attr "cc" "*,*,*,*,*,none,none")])
  12440. +
  12441. +
  12442. +;;== 128 bits ==================================================
  12443. +(define_expand "movti"
  12444. + [(set (match_operand:TI 0 "nonimmediate_operand" "")
  12445. + (match_operand:TI 1 "nonimmediate_operand" ""))]
  12446. + "TARGET_ARCH_AP"
  12447. + {
  12448. +
  12449. + /* One of the ops has to be in a register. */
  12450. + if (GET_CODE (operands[0]) != REG)
  12451. + operands[1] = force_reg (TImode, operands[1]);
  12452. +
  12453. + /* We must fix any pre_dec for loads and post_inc stores */
  12454. + if ( GET_CODE (operands[0]) == MEM
  12455. + && GET_CODE (XEXP(operands[0],0)) == POST_INC ){
  12456. + emit_move_insn(gen_rtx_MEM(TImode, XEXP(XEXP(operands[0],0),0)), operands[1]);
  12457. + emit_insn(gen_addsi3(XEXP(XEXP(operands[0],0),0), XEXP(XEXP(operands[0],0),0), GEN_INT(GET_MODE_SIZE(TImode))));
  12458. + DONE;
  12459. + }
  12460. +
  12461. + if ( GET_CODE (operands[1]) == MEM
  12462. + && GET_CODE (XEXP(operands[1],0)) == PRE_DEC ){
  12463. + emit_insn(gen_addsi3(XEXP(XEXP(operands[1],0),0), XEXP(XEXP(operands[1],0),0), GEN_INT(-GET_MODE_SIZE(TImode))));
  12464. + emit_move_insn(operands[0], gen_rtx_MEM(TImode, XEXP(XEXP(operands[1],0),0)));
  12465. + DONE;
  12466. + }
  12467. + })
  12468. +
  12469. +
  12470. +(define_insn_and_split "*movti_internal"
  12471. + [(set (match_operand:TI 0 "avr32_movti_dst_operand" "=r,&r, r, <RKu00,r,r")
  12472. + (match_operand:TI 1 "avr32_movti_src_operand" " r,RKu00>,RKu00,r, n,T"))]
  12473. + "(register_operand (operands[0], TImode)
  12474. + || register_operand (operands[1], TImode))"
  12475. + {
  12476. + switch (which_alternative ){
  12477. + case 0:
  12478. + case 2:
  12479. + case 4:
  12480. + return "#";
  12481. + case 1:
  12482. + return "ldm\t%p1, %0";
  12483. + case 3:
  12484. + return "stm\t%p0, %1";
  12485. + case 5:
  12486. + return "ld.d\t%U0, pc[%1 - .]\;ld.d\t%B0, pc[%1 - . + 8]";
  12487. + }
  12488. + }
  12489. +
  12490. + "reload_completed &&
  12491. + (REG_P (operands[0]) &&
  12492. + (REG_P (operands[1])
  12493. + /* If this is a load from the constant pool we split it into
  12494. + two double loads. */
  12495. + || (GET_CODE (operands[1]) == MEM
  12496. + && GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
  12497. + && CONSTANT_POOL_ADDRESS_P (XEXP (operands[1], 0)))
  12498. + /* If this is a load where the pointer register is a part
  12499. + of the register list, we must split it into two double
  12500. + loads in order for it to be exception safe. */
  12501. + || (GET_CODE (operands[1]) == MEM
  12502. + && register_operand (XEXP (operands[1], 0), SImode)
  12503. + && reg_overlap_mentioned_p (operands[0], XEXP (operands[1], 0)))
  12504. + || GET_CODE (operands[1]) == CONST_INT
  12505. + || GET_CODE (operands[1]) == CONST_DOUBLE))"
  12506. + [(set (match_dup 0) (match_dup 1))
  12507. + (set (match_dup 2) (match_dup 3))]
  12508. + {
  12509. + operands[2] = simplify_gen_subreg ( DImode, operands[0],
  12510. + TImode, 0 );
  12511. + operands[0] = simplify_gen_subreg ( DImode, operands[0],
  12512. + TImode, 8 );
  12513. + if ( REG_P(operands[1]) ){
  12514. + operands[3] = simplify_gen_subreg ( DImode, operands[1],
  12515. + TImode, 0 );
  12516. + operands[1] = simplify_gen_subreg ( DImode, operands[1],
  12517. + TImode, 8 );
  12518. + } else if ( GET_CODE(operands[1]) == CONST_DOUBLE
  12519. + || GET_CODE(operands[1]) == CONST_INT ){
  12520. + rtx split_const[2];
  12521. + avr32_split_const_expr (TImode, DImode, operands[1], split_const);
  12522. + operands[3] = split_const[1];
  12523. + operands[1] = split_const[0];
  12524. + } else if (avr32_const_pool_ref_operand (operands[1], GET_MODE(operands[1]))){
  12525. + rtx split_const[2];
  12526. + rtx cop = avoid_constant_pool_reference (operands[1]);
  12527. + if (operands[1] == cop)
  12528. + cop = get_pool_constant (XEXP (operands[1], 0));
  12529. + avr32_split_const_expr (TImode, DImode, cop, split_const);
  12530. + operands[3] = force_const_mem (DImode, split_const[1]);
  12531. + operands[1] = force_const_mem (DImode, split_const[0]);
  12532. + } else {
  12533. + rtx ptr_reg = XEXP (operands[1], 0);
  12534. + operands[1] = gen_rtx_MEM (DImode,
  12535. + gen_rtx_PLUS ( SImode,
  12536. + ptr_reg,
  12537. + GEN_INT (8) ));
  12538. + operands[3] = gen_rtx_MEM (DImode,
  12539. + ptr_reg);
  12540. +
  12541. + /* Check if the first load will clobber the pointer.
  12542. + If so, we must switch the order of the operations. */
  12543. + if ( reg_overlap_mentioned_p (operands[0], ptr_reg) )
  12544. + {
  12545. + /* We need to switch the order of the operations
  12546. + so that the pointer register does not get clobbered
  12547. + after the first double word load. */
  12548. + rtx tmp;
  12549. + tmp = operands[0];
  12550. + operands[0] = operands[2];
  12551. + operands[2] = tmp;
  12552. + tmp = operands[1];
  12553. + operands[1] = operands[3];
  12554. + operands[3] = tmp;
  12555. + }
  12556. +
  12557. +
  12558. + }
  12559. + }
  12560. + [(set_attr "length" "*,*,4,4,*,8")
  12561. + (set_attr "type" "*,*,load4,store4,*,load4")])
  12562. +
  12563. +
  12564. +;;== float - 32 bits ==========================================================
  12565. +(define_expand "movsf"
  12566. + [(set (match_operand:SF 0 "nonimmediate_operand" "")
  12567. + (match_operand:SF 1 "general_operand" ""))]
  12568. + ""
  12569. + {
  12570. +
  12571. +
  12572. + /* One of the ops has to be in a register. */
  12573. + if (GET_CODE (operands[0]) != REG)
  12574. + operands[1] = force_reg (SFmode, operands[1]);
  12575. +
  12576. + })
  12577. +
  12578. +(define_insn "*movsf_internal"
  12579. + [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,r,r,m")
  12580. + (match_operand:SF 1 "general_operand" "r, G,F,m,r"))]
  12581. + "(register_operand (operands[0], SFmode)
  12582. + || register_operand (operands[1], SFmode))"
  12583. + {
  12584. + switch (which_alternative) {
  12585. + case 0:
  12586. + case 1: return "mov\t%0, %1";
  12587. + case 2:
  12588. + {
  12589. + HOST_WIDE_INT target_float[2];
  12590. + real_to_target (target_float, CONST_DOUBLE_REAL_VALUE (operands[1]), SFmode);
  12591. + if ( TARGET_V2_INSNS
  12592. + && avr32_hi16_immediate_operand (GEN_INT (target_float[0]), VOIDmode) )
  12593. + return "movh\t%0, hi(%1)";
  12594. + else
  12595. + return "mov\t%0, lo(%1)\;orh\t%0, hi(%1)";
  12596. + }
  12597. + case 3:
  12598. + if ( (REG_P(XEXP(operands[1], 0))
  12599. + && REGNO(XEXP(operands[1], 0)) == SP_REGNUM)
  12600. + || (GET_CODE(XEXP(operands[1], 0)) == PLUS
  12601. + && REGNO(XEXP(XEXP(operands[1], 0), 0)) == SP_REGNUM
  12602. + && GET_CODE(XEXP(XEXP(operands[1], 0), 1)) == CONST_INT
  12603. + && INTVAL(XEXP(XEXP(operands[1], 0), 1)) % 4 == 0
  12604. + && INTVAL(XEXP(XEXP(operands[1], 0), 1)) <= 0x1FC) )
  12605. + return "lddsp\t%0, %1";
  12606. + else if ( avr32_const_pool_ref_operand(operands[1], GET_MODE(operands[1])) )
  12607. + return "lddpc\t%0, %1";
  12608. + else
  12609. + return "ld.w\t%0, %1";
  12610. + case 4:
  12611. + if ( (REG_P(XEXP(operands[0], 0))
  12612. + && REGNO(XEXP(operands[0], 0)) == SP_REGNUM)
  12613. + || (GET_CODE(XEXP(operands[0], 0)) == PLUS
  12614. + && REGNO(XEXP(XEXP(operands[0], 0), 0)) == SP_REGNUM
  12615. + && GET_CODE(XEXP(XEXP(operands[0], 0), 1)) == CONST_INT
  12616. + && INTVAL(XEXP(XEXP(operands[0], 0), 1)) % 4 == 0
  12617. + && INTVAL(XEXP(XEXP(operands[0], 0), 1)) <= 0x1FC) )
  12618. + return "stdsp\t%0, %1";
  12619. + else
  12620. + return "st.w\t%0, %1";
  12621. + default:
  12622. + abort();
  12623. + }
  12624. + }
  12625. +
  12626. + [(set_attr "length" "2,4,8,4,4")
  12627. + (set_attr "type" "alu,alu,alu2,load,store")
  12628. + (set_attr "cc" "none,none,clobber,none,none")])
  12629. +
  12630. +
  12631. +
  12632. +;;== double - 64 bits =========================================================
  12633. +(define_expand "movdf"
  12634. + [(set (match_operand:DF 0 "nonimmediate_operand" "")
  12635. + (match_operand:DF 1 "general_operand" ""))]
  12636. + ""
  12637. + {
  12638. + /* One of the ops has to be in a register. */
  12639. + if (GET_CODE (operands[0]) != REG){
  12640. + operands[1] = force_reg (DFmode, operands[1]);
  12641. + }
  12642. + })
  12643. +
  12644. +
  12645. +(define_insn_and_split "*movdf_internal"
  12646. + [(set (match_operand:DF 0 "nonimmediate_operand" "=r,r,r,r,m")
  12647. + (match_operand:DF 1 "general_operand" " r,G,F,m,r"))]
  12648. + "(register_operand (operands[0], DFmode)
  12649. + || register_operand (operands[1], DFmode))"
  12650. + {
  12651. + switch (which_alternative ){
  12652. + case 0:
  12653. + case 1:
  12654. + case 2:
  12655. + return "#";
  12656. + case 3:
  12657. + if ( avr32_const_pool_ref_operand(operands[1], GET_MODE(operands[1])))
  12658. + return "ld.d\t%0, pc[%1 - .]";
  12659. + else
  12660. + return "ld.d\t%0, %1";
  12661. + case 4:
  12662. + return "st.d\t%0, %1";
  12663. + default:
  12664. + abort();
  12665. + }
  12666. + }
  12667. + "reload_completed
  12668. + && (REG_P (operands[0])
  12669. + && (REG_P (operands[1])
  12670. + || GET_CODE (operands[1]) == CONST_DOUBLE))"
  12671. + [(set (match_dup 0) (match_dup 1))
  12672. + (set (match_dup 2) (match_dup 3))]
  12673. + "
  12674. + {
  12675. + operands[2] = gen_highpart (SImode, operands[0]);
  12676. + operands[0] = gen_lowpart (SImode, operands[0]);
  12677. + operands[3] = gen_highpart(SImode, operands[1]);
  12678. + operands[1] = gen_lowpart(SImode, operands[1]);
  12679. + }
  12680. + "
  12681. +
  12682. + [(set_attr "length" "*,*,*,4,4")
  12683. + (set_attr "type" "*,*,*,load2,store2")
  12684. + (set_attr "cc" "*,*,*,none,none")])
  12685. +
  12686. +
  12687. +;;=============================================================================
  12688. +;; Conditional Moves
  12689. +;;=============================================================================
  12690. +(define_insn "ld<mode>_predicable"
  12691. + [(set (match_operand:MOVCC 0 "register_operand" "=r")
  12692. + (match_operand:MOVCC 1 "avr32_non_rmw_memory_operand" "<MOVCC:pred_mem_constraint>"))]
  12693. + "TARGET_V2_INSNS"
  12694. + "ld<MOVCC:load_postfix>%?\t%0, %1"
  12695. + [(set_attr "length" "4")
  12696. + (set_attr "cc" "cmp_cond_insn")
  12697. + (set_attr "type" "load")
  12698. + (set_attr "predicable" "yes")]
  12699. +)
  12700. +
  12701. +
  12702. +(define_insn "st<mode>_predicable"
  12703. + [(set (match_operand:MOVCC 0 "avr32_non_rmw_memory_operand" "=<MOVCC:pred_mem_constraint>")
  12704. + (match_operand:MOVCC 1 "register_operand" "r"))]
  12705. + "TARGET_V2_INSNS"
  12706. + "st<MOVCC:store_postfix>%?\t%0, %1"
  12707. + [(set_attr "length" "4")
  12708. + (set_attr "cc" "cmp_cond_insn")
  12709. + (set_attr "type" "store")
  12710. + (set_attr "predicable" "yes")]
  12711. +)
  12712. +
  12713. +(define_insn "mov<mode>_predicable"
  12714. + [(set (match_operand:MOVCC 0 "register_operand" "=r")
  12715. + (match_operand:MOVCC 1 "avr32_cond_register_immediate_operand" "rKs08"))]
  12716. + ""
  12717. + "mov%?\t%0, %1"
  12718. + [(set_attr "length" "4")
  12719. + (set_attr "cc" "cmp_cond_insn")
  12720. + (set_attr "type" "alu")
  12721. + (set_attr "predicable" "yes")]
  12722. +)
  12723. +
  12724. +
  12725. +;;=============================================================================
  12726. +;; Move chunks of memory
  12727. +;;=============================================================================
  12728. +
  12729. +(define_expand "movmemsi"
  12730. + [(match_operand:BLK 0 "general_operand" "")
  12731. + (match_operand:BLK 1 "general_operand" "")
  12732. + (match_operand:SI 2 "const_int_operand" "")
  12733. + (match_operand:SI 3 "const_int_operand" "")]
  12734. + ""
  12735. + "
  12736. + if (avr32_gen_movmemsi (operands))
  12737. + DONE;
  12738. + FAIL;
  12739. + "
  12740. + )
  12741. +
  12742. +
  12743. +
  12744. +
  12745. +;;=============================================================================
  12746. +;; Bit field instructions
  12747. +;;-----------------------------------------------------------------------------
  12748. +;; Instructions to insert or extract bit-fields
  12749. +;;=============================================================================
  12750. +
  12751. +(define_insn "insv"
  12752. + [ (set (zero_extract:SI (match_operand:SI 0 "register_operand" "+r")
  12753. + (match_operand:SI 1 "immediate_operand" "Ku05")
  12754. + (match_operand:SI 2 "immediate_operand" "Ku05"))
  12755. + (match_operand 3 "register_operand" "r"))]
  12756. + ""
  12757. + "bfins\t%0, %3, %2, %1"
  12758. + [(set_attr "type" "alu")
  12759. + (set_attr "length" "4")
  12760. + (set_attr "cc" "set_ncz")])
  12761. +
  12762. +
  12763. +
  12764. +(define_expand "extv"
  12765. + [ (set (match_operand:SI 0 "register_operand" "")
  12766. + (sign_extract:SI (match_operand:SI 1 "register_operand" "")
  12767. + (match_operand:SI 2 "immediate_operand" "")
  12768. + (match_operand:SI 3 "immediate_operand" "")))]
  12769. + ""
  12770. + {
  12771. + if ( INTVAL(operands[2]) >= 32 )
  12772. + FAIL;
  12773. + }
  12774. +)
  12775. +
  12776. +(define_expand "extzv"
  12777. + [ (set (match_operand:SI 0 "register_operand" "")
  12778. + (zero_extract:SI (match_operand:SI 1 "register_operand" "")
  12779. + (match_operand:SI 2 "immediate_operand" "")
  12780. + (match_operand:SI 3 "immediate_operand" "")))]
  12781. + ""
  12782. + {
  12783. + if ( INTVAL(operands[2]) >= 32 )
  12784. + FAIL;
  12785. + }
  12786. +)
  12787. +
  12788. +(define_insn "extv_internal"
  12789. + [ (set (match_operand:SI 0 "register_operand" "=r")
  12790. + (sign_extract:SI (match_operand:SI 1 "register_operand" "r")
  12791. + (match_operand:SI 2 "immediate_operand" "Ku05")
  12792. + (match_operand:SI 3 "immediate_operand" "Ku05")))]
  12793. + "INTVAL(operands[2]) < 32"
  12794. + "bfexts\t%0, %1, %3, %2"
  12795. + [(set_attr "type" "alu")
  12796. + (set_attr "length" "4")
  12797. + (set_attr "cc" "set_ncz")])
  12798. +
  12799. +
  12800. +(define_insn "extzv_internal"
  12801. + [ (set (match_operand:SI 0 "register_operand" "=r")
  12802. + (zero_extract:SI (match_operand:SI 1 "register_operand" "r")
  12803. + (match_operand:SI 2 "immediate_operand" "Ku05")
  12804. + (match_operand:SI 3 "immediate_operand" "Ku05")))]
  12805. + "INTVAL(operands[2]) < 32"
  12806. + "bfextu\t%0, %1, %3, %2"
  12807. + [(set_attr "type" "alu")
  12808. + (set_attr "length" "4")
  12809. + (set_attr "cc" "set_ncz")])
  12810. +
  12811. +
  12812. +
  12813. +;;=============================================================================
  12814. +;; Some peepholes for avoiding unnecessary cast instructions
  12815. +;; followed by bfins.
  12816. +;;-----------------------------------------------------------------------------
  12817. +
  12818. +(define_peephole2
  12819. + [(set (match_operand:SI 0 "register_operand" "")
  12820. + (zero_extend:SI (match_operand:QI 1 "register_operand" "")))
  12821. + (set (zero_extract:SI (match_operand 2 "register_operand" "")
  12822. + (match_operand:SI 3 "immediate_operand" "")
  12823. + (match_operand:SI 4 "immediate_operand" ""))
  12824. + (match_dup 0))]
  12825. + "((peep2_reg_dead_p(2, operands[0]) &&
  12826. + (INTVAL(operands[3]) <= 8)))"
  12827. + [(set (zero_extract:SI (match_dup 2)
  12828. + (match_dup 3)
  12829. + (match_dup 4))
  12830. + (match_dup 1))]
  12831. + )
  12832. +
  12833. +(define_peephole2
  12834. + [(set (match_operand:SI 0 "register_operand" "")
  12835. + (zero_extend:SI (match_operand:HI 1 "register_operand" "")))
  12836. + (set (zero_extract:SI (match_operand 2 "register_operand" "")
  12837. + (match_operand:SI 3 "immediate_operand" "")
  12838. + (match_operand:SI 4 "immediate_operand" ""))
  12839. + (match_dup 0))]
  12840. + "((peep2_reg_dead_p(2, operands[0]) &&
  12841. + (INTVAL(operands[3]) <= 16)))"
  12842. + [(set (zero_extract:SI (match_dup 2)
  12843. + (match_dup 3)
  12844. + (match_dup 4))
  12845. + (match_dup 1))]
  12846. + )
  12847. +
  12848. +;;=============================================================================
  12849. +;; push bytes
  12850. +;;-----------------------------------------------------------------------------
  12851. +;; Implements the push instruction
  12852. +;;=============================================================================
  12853. +(define_insn "pushm"
  12854. + [(set (mem:BLK (pre_dec:BLK (reg:SI SP_REGNUM)))
  12855. + (unspec:BLK [(match_operand 0 "const_int_operand" "")]
  12856. + UNSPEC_PUSHM))]
  12857. + ""
  12858. + {
  12859. + if (INTVAL(operands[0])) {
  12860. + return "pushm\t%r0";
  12861. + } else {
  12862. + return "";
  12863. + }
  12864. + }
  12865. + [(set_attr "type" "store")
  12866. + (set_attr "length" "2")
  12867. + (set_attr "cc" "none")])
  12868. +
  12869. +(define_insn "stm"
  12870. + [(unspec [(match_operand 0 "register_operand" "r")
  12871. + (match_operand 1 "const_int_operand" "")
  12872. + (match_operand 2 "const_int_operand" "")]
  12873. + UNSPEC_STM)]
  12874. + ""
  12875. + {
  12876. + if (INTVAL(operands[1])) {
  12877. + if (INTVAL(operands[2]) != 0)
  12878. + return "stm\t--%0, %s1";
  12879. + else
  12880. + return "stm\t%0, %s1";
  12881. + } else {
  12882. + return "";
  12883. + }
  12884. + }
  12885. + [(set_attr "type" "store")
  12886. + (set_attr "length" "4")
  12887. + (set_attr "cc" "none")])
  12888. +
  12889. +
  12890. +
  12891. +(define_insn "popm"
  12892. + [(unspec [(match_operand 0 "const_int_operand" "")]
  12893. + UNSPEC_POPM)]
  12894. + ""
  12895. + {
  12896. + if (INTVAL(operands[0])) {
  12897. + return "popm %r0";
  12898. + } else {
  12899. + return "";
  12900. + }
  12901. + }
  12902. + [(set_attr "type" "load")
  12903. + (set_attr "length" "2")])
  12904. +
  12905. +
  12906. +
  12907. +;;=============================================================================
  12908. +;; add
  12909. +;;-----------------------------------------------------------------------------
  12910. +;; Adds reg1 with reg2 and puts the result in reg0.
  12911. +;;=============================================================================
  12912. +(define_insn "add<mode>3"
  12913. + [(set (match_operand:INTM 0 "register_operand" "=r,r,r,r,r")
  12914. + (plus:INTM (match_operand:INTM 1 "register_operand" "%0,r,0,r,0")
  12915. + (match_operand:INTM 2 "avr32_add_operand" "r,r,Is08,Is16,Is21")))]
  12916. + ""
  12917. + "@
  12918. + add %0, %2
  12919. + add %0, %1, %2
  12920. + sub %0, %n2
  12921. + sub %0, %1, %n2
  12922. + sub %0, %n2"
  12923. +
  12924. + [(set_attr "length" "2,4,2,4,4")
  12925. + (set_attr "cc" "<INTM:alu_cc_attr>")])
  12926. +
  12927. +(define_insn "add<mode>3_lsl"
  12928. + [(set (match_operand:INTM 0 "register_operand" "=r")
  12929. + (plus:INTM (ashift:INTM (match_operand:INTM 1 "register_operand" "r")
  12930. + (match_operand:INTM 3 "avr32_add_shift_immediate_operand" "Ku02"))
  12931. + (match_operand:INTM 2 "register_operand" "r")))]
  12932. + ""
  12933. + "add %0, %2, %1 << %3"
  12934. + [(set_attr "length" "4")
  12935. + (set_attr "cc" "<INTM:alu_cc_attr>")])
  12936. +
  12937. +(define_insn "add<mode>3_lsl2"
  12938. + [(set (match_operand:INTM 0 "register_operand" "=r")
  12939. + (plus:INTM (match_operand:INTM 1 "register_operand" "r")
  12940. + (ashift:INTM (match_operand:INTM 2 "register_operand" "r")
  12941. + (match_operand:INTM 3 "avr32_add_shift_immediate_operand" "Ku02"))))]
  12942. + ""
  12943. + "add %0, %1, %2 << %3"
  12944. + [(set_attr "length" "4")
  12945. + (set_attr "cc" "<INTM:alu_cc_attr>")])
  12946. +
  12947. +
  12948. +(define_insn "add<mode>3_mul"
  12949. + [(set (match_operand:INTM 0 "register_operand" "=r")
  12950. + (plus:INTM (mult:INTM (match_operand:INTM 1 "register_operand" "r")
  12951. + (match_operand:INTM 3 "immediate_operand" "Ku04" ))
  12952. + (match_operand:INTM 2 "register_operand" "r")))]
  12953. + "(INTVAL(operands[3]) == 0) || (INTVAL(operands[3]) == 2) ||
  12954. + (INTVAL(operands[3]) == 4) || (INTVAL(operands[3]) == 8)"
  12955. + "add %0, %2, %1 << %p3"
  12956. + [(set_attr "length" "4")
  12957. + (set_attr "cc" "<INTM:alu_cc_attr>")])
  12958. +
  12959. +(define_insn "add<mode>3_mul2"
  12960. + [(set (match_operand:INTM 0 "register_operand" "=r")
  12961. + (plus:INTM (match_operand:INTM 1 "register_operand" "r")
  12962. + (mult:INTM (match_operand:INTM 2 "register_operand" "r")
  12963. + (match_operand:INTM 3 "immediate_operand" "Ku04" ))))]
  12964. + "(INTVAL(operands[3]) == 0) || (INTVAL(operands[3]) == 2) ||
  12965. + (INTVAL(operands[3]) == 4) || (INTVAL(operands[3]) == 8)"
  12966. + "add %0, %1, %2 << %p3"
  12967. + [(set_attr "length" "4")
  12968. + (set_attr "cc" "<INTM:alu_cc_attr>")])
  12969. +
  12970. +
  12971. +(define_peephole2
  12972. + [(set (match_operand:SI 0 "register_operand" "")
  12973. + (ashift:SI (match_operand:SI 1 "register_operand" "")
  12974. + (match_operand:SI 2 "immediate_operand" "")))
  12975. + (set (match_operand:SI 3 "register_operand" "")
  12976. + (plus:SI (match_dup 0)
  12977. + (match_operand:SI 4 "register_operand" "")))]
  12978. + "(peep2_reg_dead_p(2, operands[0]) &&
  12979. + (INTVAL(operands[2]) < 4 && INTVAL(operands[2]) > 0))"
  12980. + [(set (match_dup 3)
  12981. + (plus:SI (ashift:SI (match_dup 1)
  12982. + (match_dup 2))
  12983. + (match_dup 4)))]
  12984. + )
  12985. +
  12986. +(define_peephole2
  12987. + [(set (match_operand:SI 0 "register_operand" "")
  12988. + (ashift:SI (match_operand:SI 1 "register_operand" "")
  12989. + (match_operand:SI 2 "immediate_operand" "")))
  12990. + (set (match_operand:SI 3 "register_operand" "")
  12991. + (plus:SI (match_operand:SI 4 "register_operand" "")
  12992. + (match_dup 0)))]
  12993. + "(peep2_reg_dead_p(2, operands[0]) &&
  12994. + (INTVAL(operands[2]) < 4 && INTVAL(operands[2]) > 0))"
  12995. + [(set (match_dup 3)
  12996. + (plus:SI (ashift:SI (match_dup 1)
  12997. + (match_dup 2))
  12998. + (match_dup 4)))]
  12999. + )
  13000. +
  13001. +(define_insn "adddi3"
  13002. + [(set (match_operand:DI 0 "register_operand" "=r,r")
  13003. + (plus:DI (match_operand:DI 1 "register_operand" "%0,r")
  13004. + (match_operand:DI 2 "register_operand" "r,r")))]
  13005. + ""
  13006. + "@
  13007. + add %0, %2\;adc %m0, %m0, %m2
  13008. + add %0, %1, %2\;adc %m0, %m1, %m2"
  13009. + [(set_attr "length" "6,8")
  13010. + (set_attr "type" "alu2")
  13011. + (set_attr "cc" "set_vncz")])
  13012. +
  13013. +
  13014. +(define_insn "add<mode>_imm_predicable"
  13015. + [(set (match_operand:INTM 0 "register_operand" "+r")
  13016. + (plus:INTM (match_dup 0)
  13017. + (match_operand:INTM 1 "avr32_cond_immediate_operand" "%Is08")))]
  13018. + ""
  13019. + "sub%?\t%0, -%1"
  13020. + [(set_attr "length" "4")
  13021. + (set_attr "cc" "cmp_cond_insn")
  13022. + (set_attr "predicable" "yes")]
  13023. +)
  13024. +
  13025. +;;=============================================================================
  13026. +;; subtract
  13027. +;;-----------------------------------------------------------------------------
  13028. +;; Subtract reg2 or immediate value from reg0 and puts the result in reg0.
  13029. +;;=============================================================================
  13030. +
  13031. +(define_insn "sub<mode>3"
  13032. + [(set (match_operand:INTM 0 "general_operand" "=r,r,r,r,r,r,r")
  13033. + (minus:INTM (match_operand:INTM 1 "register_const_int_operand" "0,r,0,r,0,r,Ks08")
  13034. + (match_operand:INTM 2 "register_const_int_operand" "r,r,Ks08,Ks16,Ks21,0,r")))]
  13035. + ""
  13036. + "@
  13037. + sub %0, %2
  13038. + sub %0, %1, %2
  13039. + sub %0, %2
  13040. + sub %0, %1, %2
  13041. + sub %0, %2
  13042. + rsub %0, %1
  13043. + rsub %0, %2, %1"
  13044. + [(set_attr "length" "2,4,2,4,4,2,4")
  13045. + (set_attr "cc" "<INTM:alu_cc_attr>")])
  13046. +
  13047. +(define_insn "*sub<mode>3_mul"
  13048. + [(set (match_operand:INTM 0 "register_operand" "=r")
  13049. + (minus:INTM (match_operand:INTM 1 "register_operand" "r")
  13050. + (mult:INTM (match_operand:INTM 2 "register_operand" "r")
  13051. + (match_operand:SI 3 "immediate_operand" "Ku04" ))))]
  13052. + "(INTVAL(operands[3]) == 0) || (INTVAL(operands[3]) == 2) ||
  13053. + (INTVAL(operands[3]) == 4) || (INTVAL(operands[3]) == 8)"
  13054. + "sub %0, %1, %2 << %p3"
  13055. + [(set_attr "length" "4")
  13056. + (set_attr "cc" "<INTM:alu_cc_attr>")])
  13057. +
  13058. +(define_insn "*sub<mode>3_lsl"
  13059. + [(set (match_operand:INTM 0 "register_operand" "=r")
  13060. + (minus:INTM (match_operand:INTM 1 "register_operand" "r")
  13061. + (ashift:INTM (match_operand:INTM 2 "register_operand" "r")
  13062. + (match_operand:SI 3 "avr32_add_shift_immediate_operand" "Ku02"))))]
  13063. + ""
  13064. + "sub %0, %1, %2 << %3"
  13065. + [(set_attr "length" "4")
  13066. + (set_attr "cc" "<INTM:alu_cc_attr>")])
  13067. +
  13068. +
  13069. +(define_insn "subdi3"
  13070. + [(set (match_operand:DI 0 "register_operand" "=r,r")
  13071. + (minus:DI (match_operand:DI 1 "register_operand" "%0,r")
  13072. + (match_operand:DI 2 "register_operand" "r,r")))]
  13073. + ""
  13074. + "@
  13075. + sub %0, %2\;sbc %m0, %m0, %m2
  13076. + sub %0, %1, %2\;sbc %m0, %m1, %m2"
  13077. + [(set_attr "length" "6,8")
  13078. + (set_attr "type" "alu2")
  13079. + (set_attr "cc" "set_vncz")])
  13080. +
  13081. +
  13082. +(define_insn "sub<mode>_imm_predicable"
  13083. + [(set (match_operand:INTM 0 "register_operand" "+r")
  13084. + (minus:INTM (match_dup 0)
  13085. + (match_operand:INTM 1 "avr32_cond_immediate_operand" "Ks08")))]
  13086. + ""
  13087. + "sub%?\t%0, %1"
  13088. + [(set_attr "length" "4")
  13089. + (set_attr "cc" "cmp_cond_insn")
  13090. + (set_attr "predicable" "yes")])
  13091. +
  13092. +(define_insn "rsub<mode>_imm_predicable"
  13093. + [(set (match_operand:INTM 0 "register_operand" "+r")
  13094. + (minus:INTM (match_operand:INTM 1 "avr32_cond_immediate_operand" "Ks08")
  13095. + (match_dup 0)))]
  13096. + ""
  13097. + "rsub%?\t%0, %1"
  13098. + [(set_attr "length" "4")
  13099. + (set_attr "cc" "cmp_cond_insn")
  13100. + (set_attr "predicable" "yes")])
  13101. +
  13102. +;;=============================================================================
  13103. +;; multiply
  13104. +;;-----------------------------------------------------------------------------
  13105. +;; Multiply op1 and op2 and put the value in op0.
  13106. +;;=============================================================================
  13107. +
  13108. +
  13109. +(define_insn "mulqi3"
  13110. + [(set (match_operand:QI 0 "register_operand" "=r,r,r")
  13111. + (mult:QI (match_operand:QI 1 "register_operand" "%0,r,r")
  13112. + (match_operand:QI 2 "avr32_mul_operand" "r,r,Ks08")))]
  13113. + "!TARGET_NO_MUL_INSNS"
  13114. + {
  13115. + switch (which_alternative){
  13116. + case 0:
  13117. + return "mul %0, %2";
  13118. + case 1:
  13119. + return "mul %0, %1, %2";
  13120. + case 2:
  13121. + return "mul %0, %1, %2";
  13122. + default:
  13123. + gcc_unreachable();
  13124. + }
  13125. + }
  13126. + [(set_attr "type" "mulww_w,mulww_w,mulwh")
  13127. + (set_attr "length" "2,4,4")
  13128. + (set_attr "cc" "none")])
  13129. +
  13130. +(define_insn "mulsi3"
  13131. + [(set (match_operand:SI 0 "register_operand" "=r,r,r")
  13132. + (mult:SI (match_operand:SI 1 "register_operand" "%0,r,r")
  13133. + (match_operand:SI 2 "avr32_mul_operand" "r,r,Ks08")))]
  13134. + "!TARGET_NO_MUL_INSNS"
  13135. + {
  13136. + switch (which_alternative){
  13137. + case 0:
  13138. + return "mul %0, %2";
  13139. + case 1:
  13140. + return "mul %0, %1, %2";
  13141. + case 2:
  13142. + return "mul %0, %1, %2";
  13143. + default:
  13144. + gcc_unreachable();
  13145. + }
  13146. + }
  13147. + [(set_attr "type" "mulww_w,mulww_w,mulwh")
  13148. + (set_attr "length" "2,4,4")
  13149. + (set_attr "cc" "none")])
  13150. +
  13151. +
  13152. +(define_insn "mulhisi3"
  13153. + [(set (match_operand:SI 0 "register_operand" "=r")
  13154. + (mult:SI
  13155. + (sign_extend:SI (match_operand:HI 1 "register_operand" "%r"))
  13156. + (sign_extend:SI (match_operand:HI 2 "register_operand" "r"))))]
  13157. + "!TARGET_NO_MUL_INSNS && TARGET_DSP"
  13158. + "mulhh.w %0, %1:b, %2:b"
  13159. + [(set_attr "type" "mulhh")
  13160. + (set_attr "length" "4")
  13161. + (set_attr "cc" "none")])
  13162. +
  13163. +(define_peephole2
  13164. + [(match_scratch:DI 6 "r")
  13165. + (set (match_operand:SI 0 "register_operand" "")
  13166. + (mult:SI
  13167. + (sign_extend:SI (match_operand:HI 1 "register_operand" ""))
  13168. + (sign_extend:SI (match_operand:HI 2 "register_operand" ""))))
  13169. + (set (match_operand:SI 3 "register_operand" "")
  13170. + (ashiftrt:SI (match_dup 0)
  13171. + (const_int 16)))]
  13172. + "!TARGET_NO_MUL_INSNS && TARGET_DSP
  13173. + && (peep2_reg_dead_p(1, operands[0]) || (REGNO(operands[0]) == REGNO(operands[3])))"
  13174. + [(set (match_dup 4) (sign_extend:SI (match_dup 1)))
  13175. + (set (match_dup 6)
  13176. + (ashift:DI (mult:DI (sign_extend:DI (match_dup 4))
  13177. + (sign_extend:DI (match_dup 2)))
  13178. + (const_int 16)))
  13179. + (set (match_dup 3) (match_dup 5))]
  13180. +
  13181. + "{
  13182. + operands[4] = gen_rtx_REG(SImode, REGNO(operands[1]));
  13183. + operands[5] = gen_highpart (SImode, operands[4]);
  13184. + }"
  13185. + )
  13186. +
  13187. +(define_insn "mulnhisi3"
  13188. + [(set (match_operand:SI 0 "register_operand" "=r")
  13189. + (mult:SI
  13190. + (sign_extend:SI (neg:HI (match_operand:HI 1 "register_operand" "r")))
  13191. + (sign_extend:SI (match_operand:HI 2 "register_operand" "r"))))]
  13192. + "!TARGET_NO_MUL_INSNS && TARGET_DSP"
  13193. + "mulnhh.w %0, %1:b, %2:b"
  13194. + [(set_attr "type" "mulhh")
  13195. + (set_attr "length" "4")
  13196. + (set_attr "cc" "none")])
  13197. +
  13198. +(define_insn "machisi3"
  13199. + [(set (match_operand:SI 0 "register_operand" "+r")
  13200. + (plus:SI (mult:SI
  13201. + (sign_extend:SI (match_operand:HI 1 "register_operand" "%r"))
  13202. + (sign_extend:SI (match_operand:HI 2 "register_operand" "r")))
  13203. + (match_dup 0)))]
  13204. + "!TARGET_NO_MUL_INSNS && TARGET_DSP"
  13205. + "machh.w %0, %1:b, %2:b"
  13206. + [(set_attr "type" "machh_w")
  13207. + (set_attr "length" "4")
  13208. + (set_attr "cc" "none")])
  13209. +
  13210. +
  13211. +
  13212. +(define_insn "mulsidi3"
  13213. + [(set (match_operand:DI 0 "register_operand" "=r")
  13214. + (mult:DI
  13215. + (sign_extend:DI (match_operand:SI 1 "register_operand" "%r"))
  13216. + (sign_extend:DI (match_operand:SI 2 "register_operand" "r"))))]
  13217. + "!TARGET_NO_MUL_INSNS"
  13218. + "muls.d %0, %1, %2"
  13219. + [(set_attr "type" "mulww_d")
  13220. + (set_attr "length" "4")
  13221. + (set_attr "cc" "none")])
  13222. +
  13223. +(define_insn "umulsidi3"
  13224. + [(set (match_operand:DI 0 "register_operand" "=r")
  13225. + (mult:DI
  13226. + (zero_extend:DI (match_operand:SI 1 "register_operand" "%r"))
  13227. + (zero_extend:DI (match_operand:SI 2 "register_operand" "r"))))]
  13228. + "!TARGET_NO_MUL_INSNS"
  13229. + "mulu.d %0, %1, %2"
  13230. + [(set_attr "type" "mulww_d")
  13231. + (set_attr "length" "4")
  13232. + (set_attr "cc" "none")])
  13233. +
  13234. +(define_insn "*mulaccsi3"
  13235. + [(set (match_operand:SI 0 "register_operand" "+r")
  13236. + (plus:SI (mult:SI (match_operand:SI 1 "register_operand" "%r")
  13237. + (match_operand:SI 2 "register_operand" "r"))
  13238. + (match_dup 0)))]
  13239. + "!TARGET_NO_MUL_INSNS"
  13240. + "mac %0, %1, %2"
  13241. + [(set_attr "type" "macww_w")
  13242. + (set_attr "length" "4")
  13243. + (set_attr "cc" "none")])
  13244. +
  13245. +(define_insn "*mulaccsidi3"
  13246. + [(set (match_operand:DI 0 "register_operand" "+r")
  13247. + (plus:DI (mult:DI
  13248. + (sign_extend:DI (match_operand:SI 1 "register_operand" "%r"))
  13249. + (sign_extend:DI (match_operand:SI 2 "register_operand" "r")))
  13250. + (match_dup 0)))]
  13251. + "!TARGET_NO_MUL_INSNS"
  13252. + "macs.d %0, %1, %2"
  13253. + [(set_attr "type" "macww_d")
  13254. + (set_attr "length" "4")
  13255. + (set_attr "cc" "none")])
  13256. +
  13257. +(define_insn "*umulaccsidi3"
  13258. + [(set (match_operand:DI 0 "register_operand" "+r")
  13259. + (plus:DI (mult:DI
  13260. + (zero_extend:DI (match_operand:SI 1 "register_operand" "%r"))
  13261. + (zero_extend:DI (match_operand:SI 2 "register_operand" "r")))
  13262. + (match_dup 0)))]
  13263. + "!TARGET_NO_MUL_INSNS"
  13264. + "macu.d %0, %1, %2"
  13265. + [(set_attr "type" "macww_d")
  13266. + (set_attr "length" "4")
  13267. + (set_attr "cc" "none")])
  13268. +
  13269. +
  13270. +
  13271. +;; Try to avoid Write-After-Write hazards for mul operations
  13272. +;; if it can be done
  13273. +(define_peephole2
  13274. + [(set (match_operand:SI 0 "register_operand" "")
  13275. + (mult:SI
  13276. + (sign_extend:SI (match_operand 1 "general_operand" ""))
  13277. + (sign_extend:SI (match_operand 2 "general_operand" ""))))
  13278. + (set (match_dup 0)
  13279. + (match_operator:SI 3 "alu_operator" [(match_dup 0)
  13280. + (match_operand 4 "general_operand" "")]))]
  13281. + "peep2_reg_dead_p(1, operands[2])"
  13282. + [(set (match_dup 5)
  13283. + (mult:SI
  13284. + (sign_extend:SI (match_dup 1))
  13285. + (sign_extend:SI (match_dup 2))))
  13286. + (set (match_dup 0)
  13287. + (match_op_dup 3 [(match_dup 5)
  13288. + (match_dup 4)]))]
  13289. + "{operands[5] = gen_rtx_REG(SImode, REGNO(operands[2]));}"
  13290. + )
  13291. +
  13292. +
  13293. +
  13294. +;;=============================================================================
  13295. +;; DSP instructions
  13296. +;;=============================================================================
  13297. +(define_insn "mulsathh_h"
  13298. + [(set (match_operand:HI 0 "register_operand" "=r")
  13299. + (ss_truncate:HI (ashiftrt:SI (mult:SI (sign_extend:SI (match_operand:HI 1 "register_operand" "%r"))
  13300. + (sign_extend:SI (match_operand:HI 2 "register_operand" "r")))
  13301. + (const_int 15))))]
  13302. + "!TARGET_NO_MUL_INSNS && TARGET_DSP"
  13303. + "mulsathh.h\t%0, %1:b, %2:b"
  13304. + [(set_attr "length" "4")
  13305. + (set_attr "cc" "none")
  13306. + (set_attr "type" "mulhh")])
  13307. +
  13308. +(define_insn "mulsatrndhh_h"
  13309. + [(set (match_operand:HI 0 "register_operand" "=r")
  13310. + (ss_truncate:HI (ashiftrt:SI
  13311. + (plus:SI (mult:SI (sign_extend:SI (match_operand:HI 1 "register_operand" "%r"))
  13312. + (sign_extend:SI (match_operand:HI 2 "register_operand" "r")))
  13313. + (const_int 1073741824))
  13314. + (const_int 15))))]
  13315. + "!TARGET_NO_MUL_INSNS && TARGET_DSP"
  13316. + "mulsatrndhh.h\t%0, %1:b, %2:b"
  13317. + [(set_attr "length" "4")
  13318. + (set_attr "cc" "none")
  13319. + (set_attr "type" "mulhh")])
  13320. +
  13321. +(define_insn "mulsathh_w"
  13322. + [(set (match_operand:SI 0 "register_operand" "=r")
  13323. + (ss_truncate:SI (ashift:DI (mult:DI (sign_extend:DI (match_operand:HI 1 "register_operand" "%r"))
  13324. + (sign_extend:DI (match_operand:HI 2 "register_operand" "r")))
  13325. + (const_int 1))))]
  13326. + "!TARGET_NO_MUL_INSNS && TARGET_DSP"
  13327. + "mulsathh.w\t%0, %1:b, %2:b"
  13328. + [(set_attr "length" "4")
  13329. + (set_attr "cc" "none")
  13330. + (set_attr "type" "mulhh")])
  13331. +
  13332. +(define_insn "mulsatwh_w"
  13333. + [(set (match_operand:SI 0 "register_operand" "=r")
  13334. + (ss_truncate:SI (ashiftrt:DI (mult:DI (sign_extend:DI (match_operand:SI 1 "register_operand" "r"))
  13335. + (sign_extend:DI (match_operand:HI 2 "register_operand" "r")))
  13336. + (const_int 15))))]
  13337. + "!TARGET_NO_MUL_INSNS && TARGET_DSP"
  13338. + "mulsatwh.w\t%0, %1, %2:b"
  13339. + [(set_attr "length" "4")
  13340. + (set_attr "cc" "none")
  13341. + (set_attr "type" "mulwh")])
  13342. +
  13343. +(define_insn "mulsatrndwh_w"
  13344. + [(set (match_operand:SI 0 "register_operand" "=r")
  13345. + (ss_truncate:SI (ashiftrt:DI (plus:DI (mult:DI (sign_extend:DI (match_operand:SI 1 "register_operand" "r"))
  13346. + (sign_extend:DI (match_operand:HI 2 "register_operand" "r")))
  13347. + (const_int 1073741824))
  13348. + (const_int 15))))]
  13349. + "!TARGET_NO_MUL_INSNS && TARGET_DSP"
  13350. + "mulsatrndwh.w\t%0, %1, %2:b"
  13351. + [(set_attr "length" "4")
  13352. + (set_attr "cc" "none")
  13353. + (set_attr "type" "mulwh")])
  13354. +
  13355. +(define_insn "macsathh_w"
  13356. + [(set (match_operand:SI 0 "register_operand" "+r")
  13357. + (plus:SI (match_dup 0)
  13358. + (ss_truncate:SI (ashift:DI (mult:DI (sign_extend:DI (match_operand:HI 1 "register_operand" "%r"))
  13359. + (sign_extend:DI (match_operand:HI 2 "register_operand" "r")))
  13360. + (const_int 1)))))]
  13361. + "!TARGET_NO_MUL_INSNS && TARGET_DSP"
  13362. + "macsathh.w\t%0, %1:b, %2:b"
  13363. + [(set_attr "length" "4")
  13364. + (set_attr "cc" "none")
  13365. + (set_attr "type" "mulhh")])
  13366. +
  13367. +
  13368. +(define_insn "mulwh_d"
  13369. + [(set (match_operand:DI 0 "register_operand" "=r")
  13370. + (ashift:DI (mult:DI (sign_extend:DI (match_operand:SI 1 "register_operand" "r"))
  13371. + (sign_extend:DI (match_operand:HI 2 "register_operand" "r")))
  13372. + (const_int 16)))]
  13373. + "!TARGET_NO_MUL_INSNS && TARGET_DSP"
  13374. + "mulwh.d\t%0, %1, %2:b"
  13375. + [(set_attr "length" "4")
  13376. + (set_attr "cc" "none")
  13377. + (set_attr "type" "mulwh")])
  13378. +
  13379. +
  13380. +(define_insn "mulnwh_d"
  13381. + [(set (match_operand:DI 0 "register_operand" "=r")
  13382. + (ashift:DI (mult:DI (not:DI (sign_extend:DI (match_operand:SI 1 "register_operand" "r")))
  13383. + (sign_extend:DI (match_operand:HI 2 "register_operand" "r")))
  13384. + (const_int 16)))]
  13385. + "!TARGET_NO_MUL_INSNS && TARGET_DSP"
  13386. + "mulnwh.d\t%0, %1, %2:b"
  13387. + [(set_attr "length" "4")
  13388. + (set_attr "cc" "none")
  13389. + (set_attr "type" "mulwh")])
  13390. +
  13391. +(define_insn "macwh_d"
  13392. + [(set (match_operand:DI 0 "register_operand" "+r")
  13393. + (plus:DI (match_dup 0)
  13394. + (ashift:DI (mult:DI (sign_extend:DI (match_operand:SI 1 "register_operand" "%r"))
  13395. + (sign_extend:DI (match_operand:HI 2 "register_operand" "r")))
  13396. + (const_int 16))))]
  13397. + "!TARGET_NO_MUL_INSNS && TARGET_DSP"
  13398. + "macwh.d\t%0, %1, %2:b"
  13399. + [(set_attr "length" "4")
  13400. + (set_attr "cc" "none")
  13401. + (set_attr "type" "mulwh")])
  13402. +
  13403. +(define_insn "machh_d"
  13404. + [(set (match_operand:DI 0 "register_operand" "+r")
  13405. + (plus:DI (match_dup 0)
  13406. + (mult:DI (sign_extend:DI (match_operand:HI 1 "register_operand" "%r"))
  13407. + (sign_extend:DI (match_operand:HI 2 "register_operand" "r")))))]
  13408. + "!TARGET_NO_MUL_INSNS && TARGET_DSP"
  13409. + "machh.d\t%0, %1:b, %2:b"
  13410. + [(set_attr "length" "4")
  13411. + (set_attr "cc" "none")
  13412. + (set_attr "type" "mulwh")])
  13413. +
  13414. +(define_insn "satadd_w"
  13415. + [(set (match_operand:SI 0 "register_operand" "=r")
  13416. + (ss_plus:SI (match_operand:SI 1 "register_operand" "r")
  13417. + (match_operand:SI 2 "register_operand" "r")))]
  13418. + "TARGET_DSP"
  13419. + "satadd.w\t%0, %1, %2"
  13420. + [(set_attr "length" "4")
  13421. + (set_attr "cc" "none")
  13422. + (set_attr "type" "alu_sat")])
  13423. +
  13424. +(define_insn "satsub_w"
  13425. + [(set (match_operand:SI 0 "register_operand" "=r")
  13426. + (ss_minus:SI (match_operand:SI 1 "register_operand" "r")
  13427. + (match_operand:SI 2 "register_operand" "r")))]
  13428. + "TARGET_DSP"
  13429. + "satsub.w\t%0, %1, %2"
  13430. + [(set_attr "length" "4")
  13431. + (set_attr "cc" "none")
  13432. + (set_attr "type" "alu_sat")])
  13433. +
  13434. +(define_insn "satadd_h"
  13435. + [(set (match_operand:HI 0 "register_operand" "=r")
  13436. + (ss_plus:HI (match_operand:HI 1 "register_operand" "r")
  13437. + (match_operand:HI 2 "register_operand" "r")))]
  13438. + "TARGET_DSP"
  13439. + "satadd.h\t%0, %1, %2"
  13440. + [(set_attr "length" "4")
  13441. + (set_attr "cc" "none")
  13442. + (set_attr "type" "alu_sat")])
  13443. +
  13444. +(define_insn "satsub_h"
  13445. + [(set (match_operand:HI 0 "register_operand" "=r")
  13446. + (ss_minus:HI (match_operand:HI 1 "register_operand" "r")
  13447. + (match_operand:HI 2 "register_operand" "r")))]
  13448. + "TARGET_DSP"
  13449. + "satsub.h\t%0, %1, %2"
  13450. + [(set_attr "length" "4")
  13451. + (set_attr "cc" "none")
  13452. + (set_attr "type" "alu_sat")])
  13453. +
  13454. +
  13455. +;;=============================================================================
  13456. +;; smin
  13457. +;;-----------------------------------------------------------------------------
  13458. +;; Set reg0 to the smallest value of reg1 and reg2. It is used for signed
  13459. +;; values in the registers.
  13460. +;;=============================================================================
  13461. +(define_insn "sminsi3"
  13462. + [(set (match_operand:SI 0 "register_operand" "=r")
  13463. + (smin:SI (match_operand:SI 1 "register_operand" "r")
  13464. + (match_operand:SI 2 "register_operand" "r")))]
  13465. + ""
  13466. + "min %0, %1, %2"
  13467. + [(set_attr "length" "4")
  13468. + (set_attr "cc" "none")])
  13469. +
  13470. +;;=============================================================================
  13471. +;; smax
  13472. +;;-----------------------------------------------------------------------------
  13473. +;; Set reg0 to the largest value of reg1 and reg2. It is used for signed
  13474. +;; values in the registers.
  13475. +;;=============================================================================
  13476. +(define_insn "smaxsi3"
  13477. + [(set (match_operand:SI 0 "register_operand" "=r")
  13478. + (smax:SI (match_operand:SI 1 "register_operand" "r")
  13479. + (match_operand:SI 2 "register_operand" "r")))]
  13480. + ""
  13481. + "max %0, %1, %2"
  13482. + [(set_attr "length" "4")
  13483. + (set_attr "cc" "none")])
  13484. +
  13485. +
  13486. +
  13487. +;;=============================================================================
  13488. +;; Logical operations
  13489. +;;-----------------------------------------------------------------------------
  13490. +
  13491. +
  13492. +;; Split up simple DImode logical operations. Simply perform the logical
  13493. +;; operation on the upper and lower halves of the registers.
  13494. +(define_split
  13495. + [(set (match_operand:DI 0 "register_operand" "")
  13496. + (match_operator:DI 6 "logical_binary_operator"
  13497. + [(match_operand:DI 1 "register_operand" "")
  13498. + (match_operand:DI 2 "register_operand" "")]))]
  13499. + "reload_completed"
  13500. + [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
  13501. + (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
  13502. + "
  13503. + {
  13504. + operands[3] = gen_highpart (SImode, operands[0]);
  13505. + operands[0] = gen_lowpart (SImode, operands[0]);
  13506. + operands[4] = gen_highpart (SImode, operands[1]);
  13507. + operands[1] = gen_lowpart (SImode, operands[1]);
  13508. + operands[5] = gen_highpart (SImode, operands[2]);
  13509. + operands[2] = gen_lowpart (SImode, operands[2]);
  13510. + }"
  13511. +)
  13512. +
  13513. +;;=============================================================================
  13514. +;; Logical operations with shifted operand
  13515. +;;=============================================================================
  13516. +(define_insn "<code>si_lshift"
  13517. + [(set (match_operand:SI 0 "register_operand" "=r")
  13518. + (logical:SI (match_operator:SI 4 "logical_shift_operator"
  13519. + [(match_operand:SI 2 "register_operand" "r")
  13520. + (match_operand:SI 3 "immediate_operand" "Ku05")])
  13521. + (match_operand:SI 1 "register_operand" "r")))]
  13522. + ""
  13523. + {
  13524. + if ( GET_CODE(operands[4]) == ASHIFT )
  13525. + return "<logical_insn>\t%0, %1, %2 << %3";
  13526. + else
  13527. + return "<logical_insn>\t%0, %1, %2 >> %3";
  13528. + }
  13529. +
  13530. + [(set_attr "cc" "set_z")]
  13531. +)
  13532. +
  13533. +
  13534. +;;************************************************
  13535. +;; Peepholes for detecting logical operations
  13536. +;; with shifted operands
  13537. +;;************************************************
  13538. +
  13539. +(define_peephole
  13540. + [(set (match_operand:SI 3 "register_operand" "")
  13541. + (match_operator:SI 5 "logical_shift_operator"
  13542. + [(match_operand:SI 1 "register_operand" "")
  13543. + (match_operand:SI 2 "immediate_operand" "")]))
  13544. + (set (match_operand:SI 0 "register_operand" "")
  13545. + (logical:SI (match_operand:SI 4 "register_operand" "")
  13546. + (match_dup 3)))]
  13547. + "(dead_or_set_p(insn, operands[3])) || (REGNO(operands[3]) == REGNO(operands[0]))"
  13548. + {
  13549. + if ( GET_CODE(operands[5]) == ASHIFT )
  13550. + return "<logical_insn>\t%0, %4, %1 << %2";
  13551. + else
  13552. + return "<logical_insn>\t%0, %4, %1 >> %2";
  13553. + }
  13554. + [(set_attr "cc" "set_z")]
  13555. + )
  13556. +
  13557. +(define_peephole
  13558. + [(set (match_operand:SI 3 "register_operand" "")
  13559. + (match_operator:SI 5 "logical_shift_operator"
  13560. + [(match_operand:SI 1 "register_operand" "")
  13561. + (match_operand:SI 2 "immediate_operand" "")]))
  13562. + (set (match_operand:SI 0 "register_operand" "")
  13563. + (logical:SI (match_dup 3)
  13564. + (match_operand:SI 4 "register_operand" "")))]
  13565. + "(dead_or_set_p(insn, operands[3])) || (REGNO(operands[3]) == REGNO(operands[0]))"
  13566. + {
  13567. + if ( GET_CODE(operands[5]) == ASHIFT )
  13568. + return "<logical_insn>\t%0, %4, %1 << %2";
  13569. + else
  13570. + return "<logical_insn>\t%0, %4, %1 >> %2";
  13571. + }
  13572. + [(set_attr "cc" "set_z")]
  13573. + )
  13574. +
  13575. +
  13576. +(define_peephole2
  13577. + [(set (match_operand:SI 0 "register_operand" "")
  13578. + (match_operator:SI 5 "logical_shift_operator"
  13579. + [(match_operand:SI 1 "register_operand" "")
  13580. + (match_operand:SI 2 "immediate_operand" "")]))
  13581. + (set (match_operand:SI 3 "register_operand" "")
  13582. + (logical:SI (match_operand:SI 4 "register_operand" "")
  13583. + (match_dup 0)))]
  13584. + "(peep2_reg_dead_p(2, operands[0])) || (REGNO(operands[3]) == REGNO(operands[0]))"
  13585. +
  13586. + [(set (match_dup 3)
  13587. + (logical:SI (match_op_dup:SI 5 [(match_dup 1) (match_dup 2)])
  13588. + (match_dup 4)))]
  13589. +
  13590. + ""
  13591. +)
  13592. +
  13593. +(define_peephole2
  13594. + [(set (match_operand:SI 0 "register_operand" "")
  13595. + (match_operator:SI 5 "logical_shift_operator"
  13596. + [(match_operand:SI 1 "register_operand" "")
  13597. + (match_operand:SI 2 "immediate_operand" "")]))
  13598. + (set (match_operand:SI 3 "register_operand" "")
  13599. + (logical:SI (match_dup 0)
  13600. + (match_operand:SI 4 "register_operand" "")))]
  13601. + "(peep2_reg_dead_p(2, operands[0])) || (REGNO(operands[3]) == REGNO(operands[0]))"
  13602. +
  13603. + [(set (match_dup 3)
  13604. + (logical:SI (match_op_dup:SI 5 [(match_dup 1) (match_dup 2)])
  13605. + (match_dup 4)))]
  13606. +
  13607. + ""
  13608. +)
  13609. +
  13610. +
  13611. +;;=============================================================================
  13612. +;; and
  13613. +;;-----------------------------------------------------------------------------
  13614. +;; Store the result after a bitwise logical-and between reg0 and reg2 in reg0.
  13615. +;;=============================================================================
  13616. +
  13617. +(define_insn "andnsi"
  13618. + [(set (match_operand:SI 0 "register_operand" "+r")
  13619. + (and:SI (match_dup 0)
  13620. + (not:SI (match_operand:SI 1 "register_operand" "r"))))]
  13621. + ""
  13622. + "andn %0, %1"
  13623. + [(set_attr "cc" "set_z")
  13624. + (set_attr "length" "2")]
  13625. +)
  13626. +
  13627. +
  13628. +(define_insn "andsi3"
  13629. + [(set (match_operand:SI 0 "avr32_rmw_memory_or_register_operand" "=Y,r,r,r, r, r,r,r,r,r")
  13630. + (and:SI (match_operand:SI 1 "avr32_rmw_memory_or_register_operand" "%0,r,0,0, 0, 0,0,0,0,r" )
  13631. + (match_operand:SI 2 "nonmemory_operand" " N,M,N,Ku16,Ks17,J,L,r,i,r")))]
  13632. + ""
  13633. + "@
  13634. + memc\t%0, %z2
  13635. + bfextu\t%0, %1, 0, %z2
  13636. + cbr\t%0, %z2
  13637. + andl\t%0, %2, COH
  13638. + andl\t%0, lo(%2)
  13639. + andh\t%0, hi(%2), COH
  13640. + andh\t%0, hi(%2)
  13641. + and\t%0, %2
  13642. + andh\t%0, hi(%2)\;andl\t%0, lo(%2)
  13643. + and\t%0, %1, %2"
  13644. +
  13645. + [(set_attr "length" "4,4,2,4,4,4,4,2,8,4")
  13646. + (set_attr "cc" "none,set_z,set_z,set_z,set_z,set_z,set_z,set_z,set_z,set_z")])
  13647. +
  13648. +
  13649. +
  13650. +(define_insn "anddi3"
  13651. + [(set (match_operand:DI 0 "register_operand" "=&r,&r")
  13652. + (and:DI (match_operand:DI 1 "register_operand" "%0,r")
  13653. + (match_operand:DI 2 "register_operand" "r,r")))]
  13654. + ""
  13655. + "#"
  13656. + [(set_attr "length" "8")
  13657. + (set_attr "cc" "clobber")]
  13658. +)
  13659. +
  13660. +;;=============================================================================
  13661. +;; or
  13662. +;;-----------------------------------------------------------------------------
  13663. +;; Store the result after a bitwise inclusive-or between reg0 and reg2 in reg0.
  13664. +;;=============================================================================
  13665. +
  13666. +(define_insn "iorsi3"
  13667. + [(set (match_operand:SI 0 "avr32_rmw_memory_or_register_operand" "=Y,r,r, r,r,r,r")
  13668. + (ior:SI (match_operand:SI 1 "avr32_rmw_memory_or_register_operand" "%0,0,0, 0,0,0,r" )
  13669. + (match_operand:SI 2 "nonmemory_operand" " O,O,Ku16,J,r,i,r")))]
  13670. + ""
  13671. + "@
  13672. + mems\t%0, %p2
  13673. + sbr\t%0, %p2
  13674. + orl\t%0, %2
  13675. + orh\t%0, hi(%2)
  13676. + or\t%0, %2
  13677. + orh\t%0, hi(%2)\;orl\t%0, lo(%2)
  13678. + or\t%0, %1, %2"
  13679. +
  13680. + [(set_attr "length" "4,2,4,4,2,8,4")
  13681. + (set_attr "cc" "none,set_z,set_z,set_z,set_z,set_z,set_z")])
  13682. +
  13683. +
  13684. +(define_insn "iordi3"
  13685. + [(set (match_operand:DI 0 "register_operand" "=&r,&r")
  13686. + (ior:DI (match_operand:DI 1 "register_operand" "%0,r")
  13687. + (match_operand:DI 2 "register_operand" "r,r")))]
  13688. + ""
  13689. + "#"
  13690. + [(set_attr "length" "8")
  13691. + (set_attr "cc" "clobber")]
  13692. +)
  13693. +
  13694. +;;=============================================================================
  13695. +;; xor bytes
  13696. +;;-----------------------------------------------------------------------------
  13697. +;; Store the result after a bitwise exclusive-or between reg0 and reg2 in reg0.
  13698. +;;=============================================================================
  13699. +
  13700. +(define_insn "xorsi3"
  13701. + [(set (match_operand:SI 0 "avr32_rmw_memory_or_register_operand" "=Y,r, r,r,r,r")
  13702. + (xor:SI (match_operand:SI 1 "avr32_rmw_memory_or_register_operand" "%0,0, 0,0,0,r" )
  13703. + (match_operand:SI 2 "nonmemory_operand" " O,Ku16,J,r,i,r")))]
  13704. + ""
  13705. + "@
  13706. + memt\t%0, %p2
  13707. + eorl\t%0, %2
  13708. + eorh\t%0, hi(%2)
  13709. + eor\t%0, %2
  13710. + eorh\t%0, hi(%2)\;eorl\t%0, lo(%2)
  13711. + eor\t%0, %1, %2"
  13712. +
  13713. + [(set_attr "length" "4,4,4,2,8,4")
  13714. + (set_attr "cc" "none,set_z,set_z,set_z,set_z,set_z")])
  13715. +
  13716. +(define_insn "xordi3"
  13717. + [(set (match_operand:DI 0 "register_operand" "=&r,&r")
  13718. + (xor:DI (match_operand:DI 1 "register_operand" "%0,r")
  13719. + (match_operand:DI 2 "register_operand" "r,r")))]
  13720. + ""
  13721. + "#"
  13722. + [(set_attr "length" "8")
  13723. + (set_attr "cc" "clobber")]
  13724. +)
  13725. +
  13726. +;;=============================================================================
  13727. +;; Three operand predicable insns
  13728. +;;=============================================================================
  13729. +
  13730. +(define_insn "<predicable_insn3><mode>_predicable"
  13731. + [(set (match_operand:INTM 0 "register_operand" "=r")
  13732. + (predicable_op3:INTM (match_operand:INTM 1 "register_operand" "<predicable_commutative3>r")
  13733. + (match_operand:INTM 2 "register_operand" "r")))]
  13734. + "TARGET_V2_INSNS"
  13735. + "<predicable_insn3>%?\t%0, %1, %2"
  13736. + [(set_attr "length" "4")
  13737. + (set_attr "cc" "cmp_cond_insn")
  13738. + (set_attr "predicable" "yes")]
  13739. +)
  13740. +
  13741. +(define_insn_and_split "<predicable_insn3><mode>_imm_clobber_predicable"
  13742. + [(parallel
  13743. + [(set (match_operand:INTM 0 "register_operand" "=r")
  13744. + (predicable_op3:INTM (match_operand:INTM 1 "register_operand" "<predicable_commutative3>r")
  13745. + (match_operand:INTM 2 "avr32_mov_immediate_operand" "JKs21")))
  13746. + (clobber (match_operand:INTM 3 "register_operand" "=&r"))])]
  13747. + "TARGET_V2_INSNS"
  13748. + {
  13749. + if ( current_insn_predicate != NULL_RTX )
  13750. + {
  13751. + if ( avr32_const_ok_for_constraint_p (INTVAL (operands[2]), 'K', "Ks08") )
  13752. + return "%! mov%?\t%3, %2\;<predicable_insn3>%?\t%0, %1, %3";
  13753. + else if ( avr32_const_ok_for_constraint_p (INTVAL (operands[2]), 'K', "Ks21") )
  13754. + return "%! mov\t%3, %2\;<predicable_insn3>%?\t%0, %1, %3";
  13755. + else
  13756. + return "%! movh\t%3, hi(%2)\;<predicable_insn3>%?\t%0, %1, %3";
  13757. + }
  13758. + else
  13759. + {
  13760. + if ( !avr32_cond_imm_clobber_splittable (insn, operands) )
  13761. + {
  13762. + if ( avr32_const_ok_for_constraint_p (INTVAL (operands[2]), 'K', "Ks08") )
  13763. + return "mov%?\t%3, %2\;<predicable_insn3>%?\t%0, %1, %3";
  13764. + else if ( avr32_const_ok_for_constraint_p (INTVAL (operands[2]), 'K', "Ks21") )
  13765. + return "mov\t%3, %2\;<predicable_insn3>%?\t%0, %1, %3";
  13766. + else
  13767. + return "movh\t%3, hi(%2)\;<predicable_insn3>%?\t%0, %1, %3";
  13768. + }
  13769. + return "#";
  13770. + }
  13771. +
  13772. + }
  13773. + ;; If we find out that we could not actually do if-conversion on the block
  13774. + ;; containing this insn we convert it back to normal immediate format
  13775. + ;; to avoid outputting a redundant move insn
  13776. + ;; Do not split until after we have checked if we can make the insn
  13777. + ;; conditional.
  13778. + "(GET_CODE (PATTERN (insn)) != COND_EXEC
  13779. + && cfun->machine->ifcvt_after_reload
  13780. + && avr32_cond_imm_clobber_splittable (insn, operands))"
  13781. + [(set (match_dup 0)
  13782. + (predicable_op3:INTM (match_dup 1)
  13783. + (match_dup 2)))]
  13784. + ""
  13785. + [(set_attr "length" "8")
  13786. + (set_attr "cc" "cmp_cond_insn")
  13787. + (set_attr "predicable" "yes")]
  13788. + )
  13789. +
  13790. +
  13791. +;;=============================================================================
  13792. +;; Zero extend predicable insns
  13793. +;;=============================================================================
  13794. +(define_insn_and_split "zero_extendhisi_clobber_predicable"
  13795. + [(parallel
  13796. + [(set (match_operand:SI 0 "register_operand" "=r")
  13797. + (zero_extend:SI (match_operand:HI 1 "register_operand" "r")))
  13798. + (clobber (match_operand:SI 2 "register_operand" "=&r"))])]
  13799. + "TARGET_V2_INSNS"
  13800. + {
  13801. + if ( current_insn_predicate != NULL_RTX )
  13802. + {
  13803. + return "%! mov\t%2, 0xffff\;and%?\t%0, %1, %2";
  13804. + }
  13805. + else
  13806. + {
  13807. + return "#";
  13808. + }
  13809. +
  13810. + }
  13811. + ;; If we find out that we could not actually do if-conversion on the block
  13812. + ;; containing this insn we convert it back to normal immediate format
  13813. + ;; to avoid outputting a redundant move insn
  13814. + ;; Do not split until after we have checked if we can make the insn
  13815. + ;; conditional.
  13816. + "(GET_CODE (PATTERN (insn)) != COND_EXEC
  13817. + && cfun->machine->ifcvt_after_reload)"
  13818. + [(set (match_dup 0)
  13819. + (zero_extend:SI (match_dup 1)))]
  13820. + ""
  13821. + [(set_attr "length" "8")
  13822. + (set_attr "cc" "cmp_cond_insn")
  13823. + (set_attr "predicable" "yes")]
  13824. + )
  13825. +
  13826. +(define_insn_and_split "zero_extendqisi_clobber_predicable"
  13827. + [(parallel
  13828. + [(set (match_operand:SI 0 "register_operand" "=r")
  13829. + (zero_extend:SI (match_operand:QI 1 "register_operand" "r")))
  13830. + (clobber (match_operand:SI 2 "register_operand" "=&r"))])]
  13831. + "TARGET_V2_INSNS"
  13832. + {
  13833. + if ( current_insn_predicate != NULL_RTX )
  13834. + {
  13835. + return "%! mov\t%2, 0xff\;and%?\t%0, %1, %2";
  13836. + }
  13837. + else
  13838. + {
  13839. + return "#";
  13840. + }
  13841. +
  13842. + }
  13843. + ;; If we find out that we could not actually do if-conversion on the block
  13844. + ;; containing this insn we convert it back to normal immediate format
  13845. + ;; to avoid outputting a redundant move insn
  13846. + ;; Do not split until after we have checked if we can make the insn
  13847. + ;; conditional.
  13848. + "(GET_CODE (PATTERN (insn)) != COND_EXEC
  13849. + && cfun->machine->ifcvt_after_reload)"
  13850. + [(set (match_dup 0)
  13851. + (zero_extend:SI (match_dup 1)))]
  13852. + ""
  13853. + [(set_attr "length" "8")
  13854. + (set_attr "cc" "cmp_cond_insn")
  13855. + (set_attr "predicable" "yes")]
  13856. + )
  13857. +
  13858. +(define_insn_and_split "zero_extendqihi_clobber_predicable"
  13859. + [(parallel
  13860. + [(set (match_operand:HI 0 "register_operand" "=r")
  13861. + (zero_extend:HI (match_operand:QI 1 "register_operand" "r")))
  13862. + (clobber (match_operand:SI 2 "register_operand" "=&r"))])]
  13863. + "TARGET_V2_INSNS"
  13864. + {
  13865. + if ( current_insn_predicate != NULL_RTX )
  13866. + {
  13867. + return "%! mov\t%2, 0xff\;and%?\t%0, %1, %2";
  13868. + }
  13869. + else
  13870. + {
  13871. + return "#";
  13872. + }
  13873. +
  13874. + }
  13875. + ;; If we find out that we could not actually do if-conversion on the block
  13876. + ;; containing this insn we convert it back to normal immediate format
  13877. + ;; to avoid outputting a redundant move insn
  13878. + ;; Do not split until after we have checked if we can make the insn
  13879. + ;; conditional.
  13880. + "(GET_CODE (PATTERN (insn)) != COND_EXEC
  13881. + && cfun->machine->ifcvt_after_reload)"
  13882. + [(set (match_dup 0)
  13883. + (zero_extend:HI (match_dup 1)))]
  13884. + ""
  13885. + [(set_attr "length" "8")
  13886. + (set_attr "cc" "cmp_cond_insn")
  13887. + (set_attr "predicable" "yes")]
  13888. + )
  13889. +;;=============================================================================
  13890. +;; divmod
  13891. +;;-----------------------------------------------------------------------------
  13892. +;; Signed division that produces both a quotient and a remainder.
  13893. +;;=============================================================================
  13894. +
  13895. +(define_expand "divmodsi4"
  13896. + [(parallel [
  13897. + (parallel [
  13898. + (set (match_operand:SI 0 "register_operand" "=r")
  13899. + (div:SI (match_operand:SI 1 "register_operand" "r")
  13900. + (match_operand:SI 2 "register_operand" "r")))
  13901. + (set (match_operand:SI 3 "register_operand" "=r")
  13902. + (mod:SI (match_dup 1)
  13903. + (match_dup 2)))])
  13904. + (use (match_dup 4))])]
  13905. + ""
  13906. + {
  13907. + if (can_create_pseudo_p ()) {
  13908. + operands[4] = gen_reg_rtx (DImode);
  13909. + emit_insn(gen_divmodsi4_internal(operands[4],operands[1],operands[2]));
  13910. + emit_move_insn(operands[0], gen_rtx_SUBREG( SImode, operands[4], 4));
  13911. + emit_move_insn(operands[3], gen_rtx_SUBREG( SImode, operands[4], 0));
  13912. + DONE;
  13913. + } else {
  13914. + FAIL;
  13915. + }
  13916. + })
  13917. +
  13918. +
  13919. +(define_insn "divmodsi4_internal"
  13920. + [(set (match_operand:DI 0 "register_operand" "=r")
  13921. + (unspec:DI [(match_operand:SI 1 "register_operand" "r")
  13922. + (match_operand:SI 2 "register_operand" "r")]
  13923. + UNSPEC_DIVMODSI4_INTERNAL))]
  13924. + ""
  13925. + "divs %0, %1, %2"
  13926. + [(set_attr "type" "div")
  13927. + (set_attr "cc" "none")])
  13928. +
  13929. +
  13930. +;;=============================================================================
  13931. +;; udivmod
  13932. +;;-----------------------------------------------------------------------------
  13933. +;; Unsigned division that produces both a quotient and a remainder.
  13934. +;;=============================================================================
  13935. +(define_expand "udivmodsi4"
  13936. + [(parallel [
  13937. + (parallel [
  13938. + (set (match_operand:SI 0 "register_operand" "=r")
  13939. + (udiv:SI (match_operand:SI 1 "register_operand" "r")
  13940. + (match_operand:SI 2 "register_operand" "r")))
  13941. + (set (match_operand:SI 3 "register_operand" "=r")
  13942. + (umod:SI (match_dup 1)
  13943. + (match_dup 2)))])
  13944. + (use (match_dup 4))])]
  13945. + ""
  13946. + {
  13947. + if (can_create_pseudo_p ()) {
  13948. + operands[4] = gen_reg_rtx (DImode);
  13949. +
  13950. + emit_insn(gen_udivmodsi4_internal(operands[4],operands[1],operands[2]));
  13951. + emit_move_insn(operands[0], gen_rtx_SUBREG( SImode, operands[4], 4));
  13952. + emit_move_insn(operands[3], gen_rtx_SUBREG( SImode, operands[4], 0));
  13953. +
  13954. + DONE;
  13955. + } else {
  13956. + FAIL;
  13957. + }
  13958. + })
  13959. +
  13960. +(define_insn "udivmodsi4_internal"
  13961. + [(set (match_operand:DI 0 "register_operand" "=r")
  13962. + (unspec:DI [(match_operand:SI 1 "register_operand" "r")
  13963. + (match_operand:SI 2 "register_operand" "r")]
  13964. + UNSPEC_UDIVMODSI4_INTERNAL))]
  13965. + ""
  13966. + "divu %0, %1, %2"
  13967. + [(set_attr "type" "div")
  13968. + (set_attr "cc" "none")])
  13969. +
  13970. +
  13971. +;;=============================================================================
  13972. +;; Arithmetic-shift left
  13973. +;;-----------------------------------------------------------------------------
  13974. +;; Arithmetic-shift reg0 left by reg2 or immediate value.
  13975. +;;=============================================================================
  13976. +
  13977. +(define_insn "ashlsi3"
  13978. + [(set (match_operand:SI 0 "register_operand" "=r,r,r")
  13979. + (ashift:SI (match_operand:SI 1 "register_operand" "r,0,r")
  13980. + (match_operand:SI 2 "register_const_int_operand" "r,Ku05,Ku05")))]
  13981. + ""
  13982. + "@
  13983. + lsl %0, %1, %2
  13984. + lsl %0, %2
  13985. + lsl %0, %1, %2"
  13986. + [(set_attr "length" "4,2,4")
  13987. + (set_attr "cc" "set_ncz")])
  13988. +
  13989. +;;=============================================================================
  13990. +;; Arithmetic-shift right
  13991. +;;-----------------------------------------------------------------------------
  13992. +;; Arithmetic-shift reg0 right by an immediate value.
  13993. +;;=============================================================================
  13994. +
  13995. +(define_insn "ashrsi3"
  13996. + [(set (match_operand:SI 0 "register_operand" "=r,r,r")
  13997. + (ashiftrt:SI (match_operand:SI 1 "register_operand" "r,0,r")
  13998. + (match_operand:SI 2 "register_const_int_operand" "r,Ku05,Ku05")))]
  13999. + ""
  14000. + "@
  14001. + asr %0, %1, %2
  14002. + asr %0, %2
  14003. + asr %0, %1, %2"
  14004. + [(set_attr "length" "4,2,4")
  14005. + (set_attr "cc" "set_ncz")])
  14006. +
  14007. +;;=============================================================================
  14008. +;; Logical shift right
  14009. +;;-----------------------------------------------------------------------------
  14010. +;; Logical shift reg0 right by an immediate value.
  14011. +;;=============================================================================
  14012. +
  14013. +(define_insn "lshrsi3"
  14014. + [(set (match_operand:SI 0 "register_operand" "=r,r,r")
  14015. + (lshiftrt:SI (match_operand:SI 1 "register_operand" "r,0,r")
  14016. + (match_operand:SI 2 "register_const_int_operand" "r,Ku05,Ku05")))]
  14017. + ""
  14018. + "@
  14019. + lsr %0, %1, %2
  14020. + lsr %0, %2
  14021. + lsr %0, %1, %2"
  14022. + [(set_attr "length" "4,2,4")
  14023. + (set_attr "cc" "set_ncz")])
  14024. +
  14025. +
  14026. +;;=============================================================================
  14027. +;; neg
  14028. +;;-----------------------------------------------------------------------------
  14029. +;; Negate operand 1 and store the result in operand 0.
  14030. +;;=============================================================================
  14031. +(define_insn "negsi2"
  14032. + [(set (match_operand:SI 0 "register_operand" "=r,r")
  14033. + (neg:SI (match_operand:SI 1 "register_operand" "0,r")))]
  14034. + ""
  14035. + "@
  14036. + neg\t%0
  14037. + rsub\t%0, %1, 0"
  14038. + [(set_attr "length" "2,4")
  14039. + (set_attr "cc" "set_vncz")])
  14040. +
  14041. +(define_insn "negsi2_predicable"
  14042. + [(set (match_operand:SI 0 "register_operand" "+r")
  14043. + (neg:SI (match_dup 0)))]
  14044. + "TARGET_V2_INSNS"
  14045. + "rsub%?\t%0, 0"
  14046. + [(set_attr "length" "4")
  14047. + (set_attr "cc" "cmp_cond_insn")
  14048. + (set_attr "predicable" "yes")])
  14049. +
  14050. +;;=============================================================================
  14051. +;; abs
  14052. +;;-----------------------------------------------------------------------------
  14053. +;; Store the absolute value of operand 1 into operand 0.
  14054. +;;=============================================================================
  14055. +(define_insn "abssi2"
  14056. + [(set (match_operand:SI 0 "register_operand" "=r")
  14057. + (abs:SI (match_operand:SI 1 "register_operand" "0")))]
  14058. + ""
  14059. + "abs\t%0"
  14060. + [(set_attr "length" "2")
  14061. + (set_attr "cc" "set_z")])
  14062. +
  14063. +
  14064. +;;=============================================================================
  14065. +;; one_cmpl
  14066. +;;-----------------------------------------------------------------------------
  14067. +;; Store the bitwise-complement of operand 1 into operand 0.
  14068. +;;=============================================================================
  14069. +
  14070. +(define_insn "one_cmplsi2"
  14071. + [(set (match_operand:SI 0 "register_operand" "=r,r")
  14072. + (not:SI (match_operand:SI 1 "register_operand" "0,r")))]
  14073. + ""
  14074. + "@
  14075. + com\t%0
  14076. + rsub\t%0, %1, -1"
  14077. + [(set_attr "length" "2,4")
  14078. + (set_attr "cc" "set_z")])
  14079. +
  14080. +
  14081. +(define_insn "one_cmplsi2_predicable"
  14082. + [(set (match_operand:SI 0 "register_operand" "+r")
  14083. + (not:SI (match_dup 0)))]
  14084. + "TARGET_V2_INSNS"
  14085. + "rsub%?\t%0, -1"
  14086. + [(set_attr "length" "4")
  14087. + (set_attr "cc" "cmp_cond_insn")
  14088. + (set_attr "predicable" "yes")])
  14089. +
  14090. +
  14091. +;;=============================================================================
  14092. +;; Bit load
  14093. +;;-----------------------------------------------------------------------------
  14094. +;; Load a bit into Z and C flags
  14095. +;;=============================================================================
  14096. +(define_insn "bldsi"
  14097. + [(set (cc0)
  14098. + (and:SI (match_operand:SI 0 "register_operand" "r")
  14099. + (match_operand:SI 1 "one_bit_set_operand" "i")))]
  14100. + ""
  14101. + "bld\t%0, %p1"
  14102. + [(set_attr "length" "4")
  14103. + (set_attr "cc" "bld")]
  14104. + )
  14105. +
  14106. +
  14107. +;;=============================================================================
  14108. +;; Compare
  14109. +;;-----------------------------------------------------------------------------
  14110. +;; Compare reg0 with reg1 or an immediate value.
  14111. +;;=============================================================================
  14112. +
  14113. +(define_expand "cmp<mode>"
  14114. + [(set (cc0)
  14115. + (compare:CMP
  14116. + (match_operand:CMP 0 "register_operand" "")
  14117. + (match_operand:CMP 1 "<CMP:cmp_predicate>" "")))]
  14118. + ""
  14119. + "{
  14120. + avr32_compare_op0 = operands[0];
  14121. + avr32_compare_op1 = operands[1];
  14122. + }"
  14123. +)
  14124. +
  14125. +(define_insn "cmp<mode>_internal"
  14126. + [(set (cc0)
  14127. + (compare:CMP
  14128. + (match_operand:CMP 0 "register_operand" "r")
  14129. + (match_operand:CMP 1 "<CMP:cmp_predicate>" "<CMP:cmp_constraint>")))]
  14130. + ""
  14131. + {
  14132. +switch(GET_MODE(operands[0]))
  14133. + {
  14134. + case QImode:
  14135. + avr32_branch_type = CMP_QI;
  14136. + break;
  14137. + case HImode:
  14138. + avr32_branch_type = CMP_HI;
  14139. + break;
  14140. + case SImode:
  14141. + avr32_branch_type = CMP_SI;
  14142. + break;
  14143. + case DImode:
  14144. + avr32_branch_type = CMP_DI;
  14145. + break;
  14146. + default:
  14147. + abort();
  14148. + }
  14149. + /* Check if the next insn already will output a compare. */
  14150. + if (!next_insn_emits_cmp (insn))
  14151. + set_next_insn_cond(insn,
  14152. + avr32_output_cmp(get_next_insn_cond(insn), GET_MODE (operands[0]), operands[0], operands[1]));
  14153. + return "";
  14154. + }
  14155. + [(set_attr "length" "4")
  14156. + (set_attr "cc" "compare")])
  14157. +
  14158. +(define_expand "cmpsf"
  14159. + [(set (cc0)
  14160. + (compare:SF
  14161. + (match_operand:SF 0 "general_operand" "")
  14162. + (match_operand:SF 1 "general_operand" "")))]
  14163. + "TARGET_ARCH_FPU && TARGET_HARD_FLOAT"
  14164. + "{
  14165. + if ( !REG_P(operands[0]) )
  14166. + operands[0] = force_reg(SFmode, operands[0]);
  14167. +
  14168. + if ( !REG_P(operands[1]) )
  14169. + operands[1] = force_reg(SFmode, operands[1]);
  14170. +
  14171. + avr32_compare_op0 = operands[0];
  14172. + avr32_compare_op1 = operands[1];
  14173. + emit_insn(gen_cmpsf_internal_uc3fp(operands[0], operands[1]));
  14174. + DONE;
  14175. + }"
  14176. +)
  14177. +
  14178. +;;=============================================================================
  14179. +;; Test if zero
  14180. +;;-----------------------------------------------------------------------------
  14181. +;; Compare reg against zero and set the condition codes.
  14182. +;;=============================================================================
  14183. +
  14184. +
  14185. +(define_expand "tstsi"
  14186. + [(set (cc0)
  14187. + (match_operand:SI 0 "register_operand" ""))]
  14188. + ""
  14189. + {
  14190. + avr32_compare_op0 = operands[0];
  14191. + avr32_compare_op1 = const0_rtx;
  14192. + }
  14193. +)
  14194. +
  14195. +(define_insn "tstsi_internal"
  14196. + [(set (cc0)
  14197. + (match_operand:SI 0 "register_operand" "r"))]
  14198. + ""
  14199. + {
  14200. + /* Check if the next insn already will output a compare. */
  14201. + if (!next_insn_emits_cmp (insn))
  14202. + set_next_insn_cond(insn,
  14203. + avr32_output_cmp(get_next_insn_cond(insn), SImode, operands[0], const0_rtx));
  14204. +
  14205. + return "";
  14206. + }
  14207. + [(set_attr "length" "2")
  14208. + (set_attr "cc" "compare")])
  14209. +
  14210. +
  14211. +(define_expand "tstdi"
  14212. + [(set (cc0)
  14213. + (match_operand:DI 0 "register_operand" ""))]
  14214. + ""
  14215. + {
  14216. + avr32_compare_op0 = operands[0];
  14217. + avr32_compare_op1 = const0_rtx;
  14218. + }
  14219. +)
  14220. +
  14221. +(define_insn "tstdi_internal"
  14222. + [(set (cc0)
  14223. + (match_operand:DI 0 "register_operand" "r"))]
  14224. + ""
  14225. + {
  14226. + /* Check if the next insn already will output a compare. */
  14227. + if (!next_insn_emits_cmp (insn))
  14228. + set_next_insn_cond(insn,
  14229. + avr32_output_cmp(get_next_insn_cond(insn), DImode, operands[0], const0_rtx));
  14230. + return "";
  14231. + }
  14232. + [(set_attr "length" "4")
  14233. + (set_attr "type" "alu2")
  14234. + (set_attr "cc" "compare")])
  14235. +
  14236. +
  14237. +
  14238. +;;=============================================================================
  14239. +;; Convert operands
  14240. +;;-----------------------------------------------------------------------------
  14241. +;;
  14242. +;;=============================================================================
  14243. +(define_insn "truncdisi2"
  14244. + [(set (match_operand:SI 0 "general_operand" "")
  14245. + (truncate:SI (match_operand:DI 1 "general_operand" "")))]
  14246. + ""
  14247. + "truncdisi2")
  14248. +
  14249. +;;=============================================================================
  14250. +;; Extend
  14251. +;;-----------------------------------------------------------------------------
  14252. +;;
  14253. +;;=============================================================================
  14254. +
  14255. +
  14256. +(define_insn "extendhisi2"
  14257. + [(set (match_operand:SI 0 "register_operand" "=r,r,r,r")
  14258. + (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "0,r,<RKu00>,m")))]
  14259. + ""
  14260. + {
  14261. + switch ( which_alternative ){
  14262. + case 0:
  14263. + return "casts.h\t%0";
  14264. + case 1:
  14265. + return "bfexts\t%0, %1, 0, 16";
  14266. + case 2:
  14267. + case 3:
  14268. + return "ld.sh\t%0, %1";
  14269. + default:
  14270. + abort();
  14271. + }
  14272. + }
  14273. + [(set_attr "length" "2,4,2,4")
  14274. + (set_attr "cc" "set_ncz,set_ncz,none,none")
  14275. + (set_attr "type" "alu,alu,load_rm,load_rm")])
  14276. +
  14277. +(define_insn "extendqisi2"
  14278. + [(set (match_operand:SI 0 "register_operand" "=r,r,r,r")
  14279. + (sign_extend:SI (match_operand:QI 1 "extendqi_operand" "0,r,RKu00,m")))]
  14280. + ""
  14281. + {
  14282. + switch ( which_alternative ){
  14283. + case 0:
  14284. + return "casts.b\t%0";
  14285. + case 1:
  14286. + return "bfexts\t%0, %1, 0, 8";
  14287. + case 2:
  14288. + case 3:
  14289. + return "ld.sb\t%0, %1";
  14290. + default:
  14291. + abort();
  14292. + }
  14293. + }
  14294. + [(set_attr "length" "2,4,2,4")
  14295. + (set_attr "cc" "set_ncz,set_ncz,none,none")
  14296. + (set_attr "type" "alu,alu,load_rm,load_rm")])
  14297. +
  14298. +(define_insn "extendqihi2"
  14299. + [(set (match_operand:HI 0 "register_operand" "=r,r,r,r")
  14300. + (sign_extend:HI (match_operand:QI 1 "extendqi_operand" "0,r,RKu00,m")))]
  14301. + ""
  14302. + {
  14303. + switch ( which_alternative ){
  14304. + case 0:
  14305. + return "casts.b\t%0";
  14306. + case 1:
  14307. + return "bfexts\t%0, %1, 0, 8";
  14308. + case 2:
  14309. + case 3:
  14310. + return "ld.sb\t%0, %1";
  14311. + default:
  14312. + abort();
  14313. + }
  14314. + }
  14315. + [(set_attr "length" "2,4,2,4")
  14316. + (set_attr "cc" "set_ncz,set_ncz,none,none")
  14317. + (set_attr "type" "alu,alu,load_rm,load_rm")])
  14318. +
  14319. +
  14320. +;;=============================================================================
  14321. +;; Zero-extend
  14322. +;;-----------------------------------------------------------------------------
  14323. +;;
  14324. +;;=============================================================================
  14325. +
  14326. +(define_insn "zero_extendhisi2"
  14327. + [(set (match_operand:SI 0 "register_operand" "=r,r,r,r")
  14328. + (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "0,r,<RKu00>,m")))]
  14329. + ""
  14330. + {
  14331. + switch ( which_alternative ){
  14332. + case 0:
  14333. + return "castu.h\t%0";
  14334. + case 1:
  14335. + return "bfextu\t%0, %1, 0, 16";
  14336. + case 2:
  14337. + case 3:
  14338. + return "ld.uh\t%0, %1";
  14339. + default:
  14340. + abort();
  14341. + }
  14342. + }
  14343. +
  14344. + [(set_attr "length" "2,4,2,4")
  14345. + (set_attr "cc" "set_ncz,set_ncz,none,none")
  14346. + (set_attr "type" "alu,alu,load_rm,load_rm")])
  14347. +
  14348. +(define_insn "zero_extendqisi2"
  14349. + [(set (match_operand:SI 0 "register_operand" "=r,r,r,r")
  14350. + (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "0,r,<RKu00>,m")))]
  14351. + ""
  14352. + {
  14353. + switch ( which_alternative ){
  14354. + case 0:
  14355. + return "castu.b\t%0";
  14356. + case 1:
  14357. + return "bfextu\t%0, %1, 0, 8";
  14358. + case 2:
  14359. + case 3:
  14360. + return "ld.ub\t%0, %1";
  14361. + default:
  14362. + abort();
  14363. + }
  14364. + }
  14365. + [(set_attr "length" "2,4,2,4")
  14366. + (set_attr "cc" "set_ncz, set_ncz, none, none")
  14367. + (set_attr "type" "alu, alu, load_rm, load_rm")])
  14368. +
  14369. +(define_insn "zero_extendqihi2"
  14370. + [(set (match_operand:HI 0 "register_operand" "=r,r,r,r")
  14371. + (zero_extend:HI (match_operand:QI 1 "nonimmediate_operand" "0,r,<RKu00>,m")))]
  14372. + ""
  14373. + {
  14374. + switch ( which_alternative ){
  14375. + case 0:
  14376. + return "castu.b\t%0";
  14377. + case 1:
  14378. + return "bfextu\t%0, %1, 0, 8";
  14379. + case 2:
  14380. + case 3:
  14381. + return "ld.ub\t%0, %1";
  14382. + default:
  14383. + abort();
  14384. + }
  14385. + }
  14386. + [(set_attr "length" "2,4,2,4")
  14387. + (set_attr "cc" "set_ncz, set_ncz, none, none")
  14388. + (set_attr "type" "alu, alu, load_rm, load_rm")])
  14389. +
  14390. +
  14391. +;;=============================================================================
  14392. +;; Conditional load and extend insns
  14393. +;;=============================================================================
  14394. +(define_insn "ldsi<mode>_predicable_se"
  14395. + [(set (match_operand:SI 0 "register_operand" "=r")
  14396. + (sign_extend:SI
  14397. + (match_operand:INTM 1 "memory_operand" "<INTM:pred_mem_constraint>")))]
  14398. + "TARGET_V2_INSNS"
  14399. + "ld<INTM:load_postfix_s>%?\t%0, %1"
  14400. + [(set_attr "length" "4")
  14401. + (set_attr "cc" "cmp_cond_insn")
  14402. + (set_attr "type" "load")
  14403. + (set_attr "predicable" "yes")]
  14404. +)
  14405. +
  14406. +(define_insn "ldsi<mode>_predicable_ze"
  14407. + [(set (match_operand:SI 0 "register_operand" "=r")
  14408. + (zero_extend:SI
  14409. + (match_operand:INTM 1 "memory_operand" "<INTM:pred_mem_constraint>")))]
  14410. + "TARGET_V2_INSNS"
  14411. + "ld<INTM:load_postfix_u>%?\t%0, %1"
  14412. + [(set_attr "length" "4")
  14413. + (set_attr "cc" "cmp_cond_insn")
  14414. + (set_attr "type" "load")
  14415. + (set_attr "predicable" "yes")]
  14416. +)
  14417. +
  14418. +(define_insn "ldhi_predicable_ze"
  14419. + [(set (match_operand:HI 0 "register_operand" "=r")
  14420. + (zero_extend:HI
  14421. + (match_operand:QI 1 "memory_operand" "RKs10")))]
  14422. + "TARGET_V2_INSNS"
  14423. + "ld.ub%?\t%0, %1"
  14424. + [(set_attr "length" "4")
  14425. + (set_attr "cc" "cmp_cond_insn")
  14426. + (set_attr "type" "load")
  14427. + (set_attr "predicable" "yes")]
  14428. +)
  14429. +
  14430. +(define_insn "ldhi_predicable_se"
  14431. + [(set (match_operand:HI 0 "register_operand" "=r")
  14432. + (sign_extend:HI
  14433. + (match_operand:QI 1 "memory_operand" "RKs10")))]
  14434. + "TARGET_V2_INSNS"
  14435. + "ld.sb%?\t%0, %1"
  14436. + [(set_attr "length" "4")
  14437. + (set_attr "cc" "cmp_cond_insn")
  14438. + (set_attr "type" "load")
  14439. + (set_attr "predicable" "yes")]
  14440. +)
  14441. +
  14442. +;;=============================================================================
  14443. +;; Conditional set register
  14444. +;; sr{cond4} rd
  14445. +;;-----------------------------------------------------------------------------
  14446. +
  14447. +;;Because of the same issue as with conditional moves and adds we must
  14448. +;;not separate the compare instrcution from the scc instruction as
  14449. +;;they might be sheduled "badly".
  14450. +
  14451. +(define_expand "s<code>"
  14452. + [(set (match_operand:SI 0 "register_operand" "=r")
  14453. + (any_cond:SI (cc0)
  14454. + (const_int 0)))]
  14455. +""
  14456. +{
  14457. + if(TARGET_HARD_FLOAT && TARGET_ARCH_FPU)
  14458. + FAIL;
  14459. +})
  14460. +
  14461. +(define_insn "*s<code>"
  14462. + [(set (match_operand:SI 0 "register_operand" "=r")
  14463. + (any_cond:SI (cc0)
  14464. + (const_int 0)))]
  14465. + ""
  14466. +{
  14467. + return "sr<cond>\t%0";
  14468. +}
  14469. +[(set_attr "length" "2")
  14470. +(set_attr "cc" "none")])
  14471. +
  14472. +(define_insn "seq"
  14473. +[(set (match_operand:SI 0 "register_operand" "=r")
  14474. +(eq:SI (cc0)
  14475. + (const_int 0)))]
  14476. + ""
  14477. +"sreq\t%0"
  14478. +[(set_attr "length" "2")
  14479. +(set_attr "cc" "none")])
  14480. +
  14481. +(define_insn "sne"
  14482. +[(set (match_operand:SI 0 "register_operand" "=r")
  14483. +(ne:SI (cc0)
  14484. + (const_int 0)))]
  14485. + ""
  14486. +"srne\t%0"
  14487. + [(set_attr "length" "2")
  14488. + (set_attr "cc" "none")])
  14489. +
  14490. +(define_insn "smi"
  14491. + [(set (match_operand:SI 0 "register_operand" "=r")
  14492. + (unspec:SI [(cc0)
  14493. + (const_int 0)] UNSPEC_COND_MI))]
  14494. + ""
  14495. + "srmi\t%0"
  14496. + [(set_attr "length" "2")
  14497. + (set_attr "cc" "none")])
  14498. +
  14499. +(define_insn "spl"
  14500. + [(set (match_operand:SI 0 "register_operand" "=r")
  14501. + (unspec:SI [(cc0)
  14502. + (const_int 0)] UNSPEC_COND_PL))]
  14503. + ""
  14504. + "srpl\t%0"
  14505. + [(set_attr "length" "2")
  14506. + (set_attr "cc" "none")])
  14507. +
  14508. +
  14509. +;;=============================================================================
  14510. +;; Conditional branch
  14511. +;;-----------------------------------------------------------------------------
  14512. +;; Branch to label if the specified condition codes are set.
  14513. +;;=============================================================================
  14514. +; branch if negative
  14515. +(define_insn "bmi"
  14516. + [(set (pc)
  14517. + (if_then_else (unspec:CC [(cc0) (const_int 0)] UNSPEC_COND_MI)
  14518. + (label_ref (match_operand 0 "" ""))
  14519. + (pc)))]
  14520. + ""
  14521. + "brmi %0"
  14522. + [(set_attr "type" "branch")
  14523. + (set (attr "length")
  14524. + (cond [(and (le (minus (match_dup 0) (pc)) (const_int 254))
  14525. + (le (minus (pc) (match_dup 0)) (const_int 256)))
  14526. + (const_int 2)] ; use compact branch
  14527. + (const_int 4))) ; use extended branch
  14528. + (set_attr "cc" "none")])
  14529. +
  14530. +(define_insn "*bmi-reverse"
  14531. + [(set (pc)
  14532. + (if_then_else (unspec:CC [(cc0) (const_int 0)] UNSPEC_COND_MI)
  14533. + (pc)
  14534. + (label_ref (match_operand 0 "" ""))))]
  14535. + ""
  14536. + "brpl %0"
  14537. + [(set_attr "type" "branch")
  14538. + (set (attr "length")
  14539. + (cond [(and (le (minus (match_dup 0) (pc)) (const_int 254))
  14540. + (le (minus (pc) (match_dup 0)) (const_int 256)))
  14541. + (const_int 2)] ; use compact branch
  14542. + (const_int 4))) ; use extended branch
  14543. + (set_attr "cc" "none")])
  14544. +
  14545. +; branch if positive
  14546. +(define_insn "bpl"
  14547. + [(set (pc)
  14548. + (if_then_else (unspec:CC [(cc0) (const_int 0)] UNSPEC_COND_PL)
  14549. + (label_ref (match_operand 0 "" ""))
  14550. + (pc)))]
  14551. + ""
  14552. + "brpl %0"
  14553. + [(set_attr "type" "branch")
  14554. + (set (attr "length")
  14555. + (cond [(and (le (minus (match_dup 0) (pc)) (const_int 254))
  14556. + (le (minus (pc) (match_dup 0)) (const_int 256)))
  14557. + (const_int 2)] ; use compact branch
  14558. + (const_int 4))) ; use extended branch
  14559. + (set_attr "cc" "none")])
  14560. +
  14561. +(define_insn "*bpl-reverse"
  14562. + [(set (pc)
  14563. + (if_then_else (unspec:CC [(cc0) (const_int 0)] UNSPEC_COND_PL)
  14564. + (pc)
  14565. + (label_ref (match_operand 0 "" ""))))]
  14566. + ""
  14567. + "brmi %0"
  14568. + [(set_attr "type" "branch")
  14569. + (set (attr "length")
  14570. + (cond [(and (le (minus (match_dup 0) (pc)) (const_int 254))
  14571. + (le (minus (pc) (match_dup 0)) (const_int 256)))
  14572. + (const_int 2)] ; use compact branch
  14573. + (const_int 4))) ; use extended branch
  14574. + (set_attr "cc" "none")])
  14575. +
  14576. +; branch if equal
  14577. +(define_insn "b<code>"
  14578. + [(set (pc)
  14579. + (if_then_else (any_cond_b:CC (cc0)
  14580. + (const_int 0))
  14581. + (label_ref (match_operand 0 "" ""))
  14582. + (pc)))]
  14583. + ""
  14584. + {
  14585. + if (TARGET_HARD_FLOAT && TARGET_ARCH_FPU && (avr32_branch_type == CMP_SF))
  14586. + return get_attr_length(insn) == 6 ? "brvs .+6\;br<cond> %0" : "brvs .+8\;br<cond> %0";
  14587. + else
  14588. + return "br<cond> %0";
  14589. + }
  14590. + [(set_attr "type" "branch")
  14591. + (set (attr "length")
  14592. + (if_then_else (eq (const_int 1)(symbol_ref "TARGET_HARD_FLOAT && TARGET_ARCH_FPU"))
  14593. + (if_then_else
  14594. + (and (le (minus (match_dup 0) (pc)) (const_int 254))
  14595. + (le (minus (pc) (match_dup 0)) (const_int 256)))
  14596. + (const_int 6)
  14597. + (const_int 8))
  14598. + (if_then_else
  14599. + (and (le (minus (match_dup 0) (pc)) (const_int 254))
  14600. + (le (minus (pc) (match_dup 0)) (const_int 256)))
  14601. + (const_int 2)
  14602. + (const_int 4))))
  14603. + (set_attr "cc" "none")])
  14604. +
  14605. +(define_insn "beq"
  14606. + [(set (pc)
  14607. + (if_then_else (eq:CC (cc0)
  14608. + (const_int 0))
  14609. + (label_ref (match_operand 0 "" ""))
  14610. + (pc)))]
  14611. + ""
  14612. + "breq %0";
  14613. + [(set_attr "type" "branch")
  14614. + (set (attr "length")
  14615. + (cond [(and (le (minus (match_dup 0) (pc)) (const_int 254))
  14616. + (le (minus (pc) (match_dup 0)) (const_int 256)))
  14617. + (const_int 2)] ; use compact branch
  14618. + (const_int 4))) ; use extended branch
  14619. + (set_attr "cc" "none")])
  14620. +
  14621. +(define_insn "bne"
  14622. + [(set (pc)
  14623. + (if_then_else (ne:CC (cc0)
  14624. + (const_int 0))
  14625. + (label_ref (match_operand 0 "" ""))
  14626. + (pc)))]
  14627. + ""
  14628. + "brne %0";
  14629. + [(set_attr "type" "branch")
  14630. + (set (attr "length")
  14631. + (cond [(and (le (minus (match_dup 0) (pc)) (const_int 254))
  14632. + (le (minus (pc) (match_dup 0)) (const_int 256)))
  14633. + (const_int 2)] ; use compact branch
  14634. + (const_int 4))) ; use extended branch
  14635. + (set_attr "cc" "none")])
  14636. +
  14637. +(define_insn "b<code>"
  14638. + [(set (pc)
  14639. + (if_then_else (any_cond4:CC (cc0)
  14640. + (const_int 0))
  14641. + (label_ref (match_operand 0 "" ""))
  14642. + (pc)))]
  14643. + ""
  14644. + {
  14645. + if(TARGET_HARD_FLOAT && TARGET_ARCH_FPU && (avr32_branch_type == CMP_SF))
  14646. + return "brvs .+8\;br<cond> %l0";
  14647. + else
  14648. + return "br<cond> %l0";
  14649. + }
  14650. + [(set_attr "type" "branch")
  14651. + (set (attr "length")
  14652. + (cond [(eq (const_int 1)(symbol_ref "TARGET_HARD_FLOAT && TARGET_ARCH_FPU"))
  14653. + (const_int 8)]
  14654. + (const_int 4)))
  14655. + (set_attr "cc" "none")])
  14656. +
  14657. +(define_insn "*b<code>-reverse"
  14658. + [(set (pc)
  14659. + (if_then_else (any_cond_b:CC (cc0)
  14660. + (const_int 0))
  14661. + (pc)
  14662. + (label_ref (match_operand 0 "" ""))))]
  14663. + ""
  14664. + {
  14665. + if (TARGET_HARD_FLOAT && TARGET_ARCH_FPU && (avr32_branch_type == CMP_SF))
  14666. + return "brvs %0\;br<invcond> %0";
  14667. + else
  14668. + return "br<invcond> %0";
  14669. + }
  14670. + [(set_attr "type" "branch")
  14671. + (set (attr "length")
  14672. + (if_then_else (eq (const_int 1)(symbol_ref "TARGET_HARD_FLOAT && TARGET_ARCH_FPU"))
  14673. + (if_then_else
  14674. + (and (le (minus (match_dup 0) (pc)) (const_int 254))
  14675. + (le (minus (pc) (match_dup 0)) (const_int 256)))
  14676. + (const_int 6)
  14677. + (const_int 8))
  14678. + (if_then_else
  14679. + (and (le (minus (match_dup 0) (pc)) (const_int 254))
  14680. + (le (minus (pc) (match_dup 0)) (const_int 256)))
  14681. + (const_int 2)
  14682. + (const_int 4))))
  14683. + (set_attr "cc" "none")])
  14684. +
  14685. +(define_insn "*beq-reverse"
  14686. + [(set (pc)
  14687. + (if_then_else (eq:CC (cc0)
  14688. + (const_int 0))
  14689. + (pc)
  14690. + (label_ref (match_operand 0 "" ""))))]
  14691. + ""
  14692. + "brne %0";
  14693. + [(set_attr "type" "branch")
  14694. + (set (attr "length")
  14695. + (cond [(and (le (minus (match_dup 0) (pc)) (const_int 254))
  14696. + (le (minus (pc) (match_dup 0)) (const_int 256)))
  14697. + (const_int 2)] ; use compact branch
  14698. + (const_int 4))) ; use extended branch
  14699. + (set_attr "cc" "none")])
  14700. +
  14701. +(define_insn "*bne-reverse"
  14702. + [(set (pc)
  14703. + (if_then_else (ne:CC (cc0)
  14704. + (const_int 0))
  14705. + (pc)
  14706. + (label_ref (match_operand 0 "" ""))))]
  14707. + ""
  14708. + "breq %0";
  14709. + [(set_attr "type" "branch")
  14710. + (set (attr "length")
  14711. + (cond [(and (le (minus (match_dup 0) (pc)) (const_int 254))
  14712. + (le (minus (pc) (match_dup 0)) (const_int 256)))
  14713. + (const_int 2)] ; use compact branch
  14714. + (const_int 4))) ; use extended branch
  14715. + (set_attr "cc" "none")])
  14716. +
  14717. +(define_insn "*b<code>-reverse"
  14718. + [(set (pc)
  14719. + (if_then_else (any_cond4:CC (cc0)
  14720. + (const_int 0))
  14721. + (pc)
  14722. + (label_ref (match_operand 0 "" ""))))]
  14723. + ""
  14724. + {
  14725. + if (TARGET_HARD_FLOAT && TARGET_ARCH_FPU && (avr32_branch_type == CMP_SF))
  14726. + return "brvs %l0\;br<invcond> %l0";
  14727. + else
  14728. + return "br<invcond> %0";
  14729. + }
  14730. + [(set_attr "type" "branch")
  14731. + (set (attr "length")
  14732. + (cond [(eq (const_int 1)(symbol_ref "TARGET_HARD_FLOAT && TARGET_ARCH_FPU"))
  14733. + (const_int 8)]
  14734. + (const_int 4)))
  14735. + (set_attr "cc" "none")])
  14736. +
  14737. +;=============================================================================
  14738. +; Conditional Add/Subtract
  14739. +;-----------------------------------------------------------------------------
  14740. +; sub{cond4} Rd, imm
  14741. +;=============================================================================
  14742. +
  14743. +
  14744. +(define_expand "add<mode>cc"
  14745. + [(set (match_operand:ADDCC 0 "register_operand" "")
  14746. + (if_then_else:ADDCC (match_operator 1 "avr32_comparison_operator"
  14747. + [(match_dup 4)
  14748. + (match_dup 5)])
  14749. + (match_operand:ADDCC 2 "register_operand" "")
  14750. + (plus:ADDCC
  14751. + (match_dup 2)
  14752. + (match_operand:ADDCC 3 "" ""))))]
  14753. + ""
  14754. + {
  14755. + if ( !(GET_CODE (operands[3]) == CONST_INT
  14756. + || (TARGET_V2_INSNS && REG_P(operands[3]))) ){
  14757. + FAIL;
  14758. + }
  14759. +
  14760. + /* Delete compare instruction as it is merged into this instruction */
  14761. + remove_insn (get_last_insn_anywhere ());
  14762. +
  14763. + operands[4] = avr32_compare_op0;
  14764. + operands[5] = avr32_compare_op1;
  14765. +
  14766. + if ( TARGET_V2_INSNS
  14767. + && REG_P(operands[3])
  14768. + && REGNO(operands[0]) != REGNO(operands[2]) ){
  14769. + emit_move_insn (operands[0], operands[2]);
  14770. + operands[2] = operands[0];
  14771. + }
  14772. + }
  14773. + )
  14774. +
  14775. +(define_insn "add<ADDCC:mode>cc_cmp<CMP:mode>_reg"
  14776. + [(set (match_operand:ADDCC 0 "register_operand" "=r")
  14777. + (if_then_else:ADDCC (match_operator 1 "avr32_comparison_operator"
  14778. + [(match_operand:CMP 4 "register_operand" "r")
  14779. + (match_operand:CMP 5 "<CMP:cmp_predicate>" "<CMP:cmp_constraint>")])
  14780. + (match_dup 0)
  14781. + (plus:ADDCC
  14782. + (match_operand:ADDCC 2 "register_operand" "r")
  14783. + (match_operand:ADDCC 3 "register_operand" "r"))))]
  14784. + "TARGET_V2_INSNS"
  14785. + {
  14786. + operands[1] = avr32_output_cmp(operands[1], GET_MODE(operands[4]), operands[4], operands[5]);
  14787. + return "add%i1\t%0, %2, %3";
  14788. + }
  14789. + [(set_attr "length" "8")
  14790. + (set_attr "cc" "cmp_cond_insn")])
  14791. +
  14792. +(define_insn "add<ADDCC:mode>cc_cmp<CMP:mode>"
  14793. + [(set (match_operand:ADDCC 0 "register_operand" "=r")
  14794. + (if_then_else:ADDCC (match_operator 1 "avr32_comparison_operator"
  14795. + [(match_operand:CMP 4 "register_operand" "r")
  14796. + (match_operand:CMP 5 "<CMP:cmp_predicate>" "<CMP:cmp_constraint>")])
  14797. + (match_operand:ADDCC 2 "register_operand" "0")
  14798. + (plus:ADDCC
  14799. + (match_dup 2)
  14800. + (match_operand:ADDCC 3 "avr32_cond_immediate_operand" "Is08"))))]
  14801. + ""
  14802. + {
  14803. + operands[1] = avr32_output_cmp(operands[1], GET_MODE(operands[4]), operands[4], operands[5]);
  14804. + return "sub%i1\t%0, -%3";
  14805. + }
  14806. + [(set_attr "length" "8")
  14807. + (set_attr "cc" "cmp_cond_insn")])
  14808. +
  14809. +;=============================================================================
  14810. +; Conditional Move
  14811. +;-----------------------------------------------------------------------------
  14812. +; mov{cond4} Rd, (Rs/imm)
  14813. +;=============================================================================
  14814. +(define_expand "mov<mode>cc"
  14815. + [(set (match_operand:MOVCC 0 "register_operand" "")
  14816. + (if_then_else:MOVCC (match_operator 1 "avr32_comparison_operator"
  14817. + [(match_dup 4)
  14818. + (match_dup 5)])
  14819. + (match_operand:MOVCC 2 "avr32_cond_register_immediate_operand" "")
  14820. + (match_operand:MOVCC 3 "avr32_cond_register_immediate_operand" "")))]
  14821. + ""
  14822. + {
  14823. + /* Delete compare instruction as it is merged into this instruction */
  14824. + remove_insn (get_last_insn_anywhere ());
  14825. +
  14826. + operands[4] = avr32_compare_op0;
  14827. + operands[5] = avr32_compare_op1;
  14828. + }
  14829. + )
  14830. +
  14831. +
  14832. +(define_insn "mov<MOVCC:mode>cc_cmp<CMP:mode>"
  14833. + [(set (match_operand:MOVCC 0 "register_operand" "=r,r,r")
  14834. + (if_then_else:MOVCC (match_operator 1 "avr32_comparison_operator"
  14835. + [(match_operand:CMP 4 "register_operand" "r,r,r")
  14836. + (match_operand:CMP 5 "<CMP:cmp_predicate>" "<CMP:cmp_constraint>,<CMP:cmp_constraint>,<CMP:cmp_constraint>")])
  14837. + (match_operand:MOVCC 2 "avr32_cond_register_immediate_operand" "0, rKs08,rKs08")
  14838. + (match_operand:MOVCC 3 "avr32_cond_register_immediate_operand" "rKs08,0,rKs08")))]
  14839. + ""
  14840. + {
  14841. + operands[1] = avr32_output_cmp(operands[1], GET_MODE(operands[4]), operands[4], operands[5]);
  14842. +
  14843. + switch( which_alternative ){
  14844. + case 0:
  14845. + return "mov%i1 %0, %3";
  14846. + case 1:
  14847. + return "mov%1 %0, %2";
  14848. + case 2:
  14849. + return "mov%1 %0, %2\;mov%i1 %0, %3";
  14850. + default:
  14851. + abort();
  14852. + }
  14853. +
  14854. + }
  14855. + [(set_attr "length" "8,8,12")
  14856. + (set_attr "cc" "cmp_cond_insn")])
  14857. +
  14858. +
  14859. +
  14860. +
  14861. +;;=============================================================================
  14862. +;; jump
  14863. +;;-----------------------------------------------------------------------------
  14864. +;; Jump inside a function; an unconditional branch to a label.
  14865. +;;=============================================================================
  14866. +(define_insn "jump"
  14867. + [(set (pc)
  14868. + (label_ref (match_operand 0 "" "")))]
  14869. + ""
  14870. + {
  14871. + if (get_attr_length(insn) > 4)
  14872. + return "Can't jump this far";
  14873. + return (get_attr_length(insn) == 2 ?
  14874. + "rjmp %0" : "bral %0");
  14875. + }
  14876. + [(set_attr "type" "branch")
  14877. + (set (attr "length")
  14878. + (cond [(and (le (minus (match_dup 0) (pc)) (const_int 1022))
  14879. + (le (minus (pc) (match_dup 0)) (const_int 1024)))
  14880. + (const_int 2) ; use rjmp
  14881. + (le (match_dup 0) (const_int 1048575))
  14882. + (const_int 4)] ; use bral
  14883. + (const_int 8))) ; do something else
  14884. + (set_attr "cc" "none")])
  14885. +
  14886. +;;=============================================================================
  14887. +;; call
  14888. +;;-----------------------------------------------------------------------------
  14889. +;; Subroutine call instruction returning no value.
  14890. +;;=============================================================================
  14891. +(define_insn "call_internal"
  14892. + [(parallel [(call (mem:SI (match_operand:SI 0 "avr32_call_operand" "r,U,T,W"))
  14893. + (match_operand 1 "" ""))
  14894. + (clobber (reg:SI LR_REGNUM))])]
  14895. + ""
  14896. + {
  14897. +
  14898. + /* Check for a flashvault call. */
  14899. + if (avr32_flashvault_call (SYMBOL_REF_DECL (operands[0])))
  14900. + {
  14901. + /* Assembly is already emitted. */
  14902. + return "";
  14903. + }
  14904. +
  14905. + switch (which_alternative) {
  14906. + case 0:
  14907. + return "icall\t%0";
  14908. + case 1:
  14909. + return "rcall\t%0";
  14910. + case 2:
  14911. + return "mcall\t%0";
  14912. + case 3:
  14913. + if (TARGET_HAS_ASM_ADDR_PSEUDOS)
  14914. + return "call\t%0";
  14915. + else
  14916. + return "mcall\tr6[%0@got]";
  14917. + default:
  14918. + abort();
  14919. + }
  14920. + }
  14921. + [(set_attr "type" "call")
  14922. + (set_attr "length" "2,4,4,10")
  14923. + (set_attr "cc" "clobber")])
  14924. +
  14925. +
  14926. +(define_expand "call"
  14927. + [(parallel [(call (match_operand:SI 0 "" "")
  14928. + (match_operand 1 "" ""))
  14929. + (clobber (reg:SI LR_REGNUM))])]
  14930. + ""
  14931. + {
  14932. + rtx call_address;
  14933. + if ( GET_CODE(operands[0]) != MEM )
  14934. + FAIL;
  14935. +
  14936. + call_address = XEXP(operands[0], 0);
  14937. +
  14938. + /* If assembler supports call pseudo insn and the call address is a symbol then nothing special needs to be done. */
  14939. + if (TARGET_HAS_ASM_ADDR_PSEUDOS && (GET_CODE(call_address) == SYMBOL_REF) )
  14940. + {
  14941. + /* We must however mark the function as using the GOT if flag_pic is set, since the call insn might turn into a mcall using the GOT ptr register. */
  14942. + if (flag_pic)
  14943. + {
  14944. + crtl->uses_pic_offset_table = 1;
  14945. + emit_call_insn(gen_call_internal(call_address, operands[1]));
  14946. + DONE;
  14947. + }
  14948. + }
  14949. + else
  14950. + {
  14951. + if (flag_pic && GET_CODE(call_address) == SYMBOL_REF )
  14952. + {
  14953. + crtl->uses_pic_offset_table = 1;
  14954. + emit_call_insn(gen_call_internal(call_address, operands[1]));
  14955. + DONE;
  14956. + }
  14957. +
  14958. + if (!SYMBOL_REF_RCALL_FUNCTION_P(operands[0]) )
  14959. + {
  14960. + if (optimize_size && GET_CODE(call_address) == SYMBOL_REF )
  14961. + {
  14962. + call_address = force_const_mem(SImode, call_address);
  14963. + }
  14964. + else
  14965. + {
  14966. + call_address = force_reg(SImode, call_address);
  14967. + }
  14968. + }
  14969. + }
  14970. + emit_call_insn(gen_call_internal(call_address, operands[1]));
  14971. + DONE;
  14972. +
  14973. + }
  14974. +)
  14975. +
  14976. +;;=============================================================================
  14977. +;; call_value
  14978. +;;-----------------------------------------------------------------------------
  14979. +;; Subroutine call instruction returning a value.
  14980. +;;=============================================================================
  14981. +(define_expand "call_value"
  14982. + [(parallel [(set (match_operand:SI 0 "" "")
  14983. + (call (match_operand:SI 1 "" "")
  14984. + (match_operand 2 "" "")))
  14985. + (clobber (reg:SI LR_REGNUM))])]
  14986. + ""
  14987. + {
  14988. + rtx call_address;
  14989. + if ( GET_CODE(operands[1]) != MEM )
  14990. + FAIL;
  14991. +
  14992. + call_address = XEXP(operands[1], 0);
  14993. +
  14994. + /* Check for a flashvault call.
  14995. + if (GET_CODE (call_address) == SYMBOL_REF
  14996. + && avr32_flashvault_call (SYMBOL_REF_DECL (call_address)))
  14997. + DONE;
  14998. +
  14999. + */
  15000. +
  15001. + /* If assembler supports call pseudo insn and the call
  15002. + address is a symbol then nothing special needs to be done. */
  15003. + if ( TARGET_HAS_ASM_ADDR_PSEUDOS
  15004. + && (GET_CODE(call_address) == SYMBOL_REF) ){
  15005. + /* We must however mark the function as using the GOT if
  15006. + flag_pic is set, since the call insn might turn into
  15007. + a mcall using the GOT ptr register. */
  15008. + if ( flag_pic ) {
  15009. + crtl->uses_pic_offset_table = 1;
  15010. + emit_call_insn(gen_call_value_internal(operands[0], call_address, operands[2]));
  15011. + DONE;
  15012. + }
  15013. + } else {
  15014. + if ( flag_pic &&
  15015. + GET_CODE(call_address) == SYMBOL_REF ){
  15016. + crtl->uses_pic_offset_table = 1;
  15017. + emit_call_insn(gen_call_value_internal(operands[0], call_address, operands[2]));
  15018. + DONE;
  15019. + }
  15020. +
  15021. + if ( !SYMBOL_REF_RCALL_FUNCTION_P(operands[1]) ){
  15022. + if ( optimize_size &&
  15023. + GET_CODE(call_address) == SYMBOL_REF){
  15024. + call_address = force_const_mem(SImode, call_address);
  15025. + } else {
  15026. + call_address = force_reg(SImode, call_address);
  15027. + }
  15028. + }
  15029. + }
  15030. + emit_call_insn(gen_call_value_internal(operands[0], call_address,
  15031. + operands[2]));
  15032. + DONE;
  15033. +
  15034. + })
  15035. +
  15036. +(define_insn "call_value_internal"
  15037. + [(parallel [(set (match_operand 0 "register_operand" "=r,r,r,r")
  15038. + (call (mem:SI (match_operand:SI 1 "avr32_call_operand" "r,U,T,W"))
  15039. + (match_operand 2 "" "")))
  15040. + (clobber (reg:SI LR_REGNUM))])]
  15041. + ;; Operand 2 not used on the AVR32.
  15042. + ""
  15043. + {
  15044. + /* Check for a flashvault call. */
  15045. + if (avr32_flashvault_call (SYMBOL_REF_DECL (operands[1])))
  15046. + {
  15047. + /* Assembly is already emitted. */
  15048. + return "";
  15049. + }
  15050. +
  15051. +
  15052. + switch (which_alternative) {
  15053. + case 0:
  15054. + return "icall\t%1";
  15055. + case 1:
  15056. + return "rcall\t%1";
  15057. + case 2:
  15058. + return "mcall\t%1";
  15059. + case 3:
  15060. + if ( TARGET_HAS_ASM_ADDR_PSEUDOS )
  15061. + return "call\t%1";
  15062. + else
  15063. + return "mcall\tr6[%1@got]";
  15064. + default:
  15065. + abort();
  15066. + }
  15067. + }
  15068. + [(set_attr "type" "call")
  15069. + (set_attr "length" "2,4,4,10")
  15070. + (set_attr "cc" "call_set")])
  15071. +
  15072. +
  15073. +;;=============================================================================
  15074. +;; untyped_call
  15075. +;;-----------------------------------------------------------------------------
  15076. +;; Subrutine call instruction returning a value of any type.
  15077. +;; The code is copied from m68k.md (except gen_blockage is removed)
  15078. +;; Fixme!
  15079. +;;=============================================================================
  15080. +(define_expand "untyped_call"
  15081. + [(parallel [(call (match_operand 0 "avr32_call_operand" "")
  15082. + (const_int 0))
  15083. + (match_operand 1 "" "")
  15084. + (match_operand 2 "" "")])]
  15085. + ""
  15086. + {
  15087. + int i;
  15088. +
  15089. + emit_call_insn (GEN_CALL (operands[0], const0_rtx, NULL, const0_rtx));
  15090. +
  15091. + for (i = 0; i < XVECLEN (operands[2], 0); i++) {
  15092. + rtx set = XVECEXP (operands[2], 0, i);
  15093. + emit_move_insn (SET_DEST (set), SET_SRC (set));
  15094. + }
  15095. +
  15096. + /* The optimizer does not know that the call sets the function value
  15097. + registers we stored in the result block. We avoid problems by
  15098. + claiming that all hard registers are used and clobbered at this
  15099. + point. */
  15100. + emit_insn (gen_blockage ());
  15101. +
  15102. + DONE;
  15103. + })
  15104. +
  15105. +
  15106. +;;=============================================================================
  15107. +;; return
  15108. +;;=============================================================================
  15109. +
  15110. +(define_insn "return"
  15111. + [(return)]
  15112. + "USE_RETURN_INSN (FALSE)"
  15113. + {
  15114. + avr32_output_return_instruction(TRUE, FALSE, NULL, NULL);
  15115. + return "";
  15116. + }
  15117. + [(set_attr "length" "4")
  15118. + (set_attr "type" "call")]
  15119. + )
  15120. +
  15121. +
  15122. +(define_insn "return_cond"
  15123. + [(set (pc)
  15124. + (if_then_else (match_operand 0 "avr32_comparison_operand" "")
  15125. + (return)
  15126. + (pc)))]
  15127. + "USE_RETURN_INSN (TRUE)"
  15128. + "ret%0\tr12";
  15129. + [(set_attr "type" "call")])
  15130. +
  15131. +(define_insn "return_cond_predicable"
  15132. + [(return)]
  15133. + "USE_RETURN_INSN (TRUE)"
  15134. + "ret%?\tr12";
  15135. + [(set_attr "type" "call")
  15136. + (set_attr "predicable" "yes")])
  15137. +
  15138. +
  15139. +(define_insn "return_imm"
  15140. + [(parallel [(set (reg RETVAL_REGNUM) (match_operand 0 "immediate_operand" "i"))
  15141. + (use (reg RETVAL_REGNUM))
  15142. + (return)])]
  15143. + "USE_RETURN_INSN (FALSE) &&
  15144. + ((INTVAL(operands[0]) == -1) || (INTVAL(operands[0]) == 0) || (INTVAL(operands[0]) == 1))"
  15145. + {
  15146. + avr32_output_return_instruction(TRUE, FALSE, NULL, operands[0]);
  15147. + return "";
  15148. + }
  15149. + [(set_attr "length" "4")
  15150. + (set_attr "type" "call")]
  15151. + )
  15152. +
  15153. +(define_insn "return_imm_cond"
  15154. + [(parallel [(set (reg RETVAL_REGNUM) (match_operand 0 "immediate_operand" "i"))
  15155. + (use (reg RETVAL_REGNUM))
  15156. + (set (pc)
  15157. + (if_then_else (match_operand 1 "avr32_comparison_operand" "")
  15158. + (return)
  15159. + (pc)))])]
  15160. + "USE_RETURN_INSN (TRUE) &&
  15161. + ((INTVAL(operands[0]) == -1) || (INTVAL(operands[0]) == 0) || (INTVAL(operands[0]) == 1))"
  15162. + "ret%1\t%0";
  15163. + [(set_attr "type" "call")]
  15164. + )
  15165. +
  15166. +(define_insn "return_imm_predicable"
  15167. + [(parallel [(set (reg RETVAL_REGNUM) (match_operand 0 "immediate_operand" "i"))
  15168. + (use (reg RETVAL_REGNUM))
  15169. + (return)])]
  15170. + "USE_RETURN_INSN (TRUE) &&
  15171. + ((INTVAL(operands[0]) == -1) || (INTVAL(operands[0]) == 0) || (INTVAL(operands[0]) == 1))"
  15172. + "ret%?\t%0";
  15173. + [(set_attr "type" "call")
  15174. + (set_attr "predicable" "yes")])
  15175. +
  15176. +(define_insn "return_<mode>reg"
  15177. + [(set (reg RETVAL_REGNUM) (match_operand:MOVM 0 "register_operand" "r"))
  15178. + (use (reg RETVAL_REGNUM))
  15179. + (return)]
  15180. + "USE_RETURN_INSN (TRUE)"
  15181. + "ret%?\t%0";
  15182. + [(set_attr "type" "call")
  15183. + (set_attr "predicable" "yes")])
  15184. +
  15185. +(define_insn "return_<mode>reg_cond"
  15186. + [(set (reg RETVAL_REGNUM) (match_operand:MOVM 0 "register_operand" "r"))
  15187. + (use (reg RETVAL_REGNUM))
  15188. + (set (pc)
  15189. + (if_then_else (match_operator 1 "avr32_comparison_operator"
  15190. + [(cc0) (const_int 0)])
  15191. + (return)
  15192. + (pc)))]
  15193. + "USE_RETURN_INSN (TRUE)"
  15194. + "ret%1\t%0";
  15195. + [(set_attr "type" "call")])
  15196. +
  15197. +;;=============================================================================
  15198. +;; nonlocal_goto_receiver
  15199. +;;-----------------------------------------------------------------------------
  15200. +;; For targets with a return stack we must make sure to flush the return stack
  15201. +;; since it will be corrupt after a nonlocal goto.
  15202. +;;=============================================================================
  15203. +(define_expand "nonlocal_goto_receiver"
  15204. + [(const_int 0)]
  15205. + "TARGET_RETURN_STACK"
  15206. + "
  15207. + {
  15208. + emit_insn ( gen_frs() );
  15209. + DONE;
  15210. + }
  15211. + "
  15212. + )
  15213. +
  15214. +
  15215. +;;=============================================================================
  15216. +;; builtin_setjmp_receiver
  15217. +;;-----------------------------------------------------------------------------
  15218. +;; For pic code we need to reload the pic register.
  15219. +;; For targets with a return stack we must make sure to flush the return stack
  15220. +;; since it will probably be corrupted.
  15221. +;;=============================================================================
  15222. +(define_expand "builtin_setjmp_receiver"
  15223. + [(label_ref (match_operand 0 "" ""))]
  15224. + "flag_pic"
  15225. + "
  15226. + {
  15227. + if ( TARGET_RETURN_STACK )
  15228. + emit_insn ( gen_frs() );
  15229. +
  15230. + avr32_load_pic_register ();
  15231. + DONE;
  15232. + }
  15233. + "
  15234. +)
  15235. +
  15236. +
  15237. +;;=============================================================================
  15238. +;; indirect_jump
  15239. +;;-----------------------------------------------------------------------------
  15240. +;; Jump to an address in reg or memory.
  15241. +;;=============================================================================
  15242. +(define_expand "indirect_jump"
  15243. + [(set (pc)
  15244. + (match_operand:SI 0 "general_operand" ""))]
  15245. + ""
  15246. + {
  15247. + /* One of the ops has to be in a register. */
  15248. + if ( (flag_pic || TARGET_HAS_ASM_ADDR_PSEUDOS )
  15249. + && !avr32_legitimate_pic_operand_p(operands[0]) )
  15250. + operands[0] = legitimize_pic_address (operands[0], SImode, 0);
  15251. + else if ( flag_pic && avr32_address_operand(operands[0], GET_MODE(operands[0])) )
  15252. + /* If we have an address operand then this function uses the pic register. */
  15253. + crtl->uses_pic_offset_table = 1;
  15254. + })
  15255. +
  15256. +
  15257. +(define_insn "indirect_jump_internal"
  15258. + [(set (pc)
  15259. + (match_operand:SI 0 "avr32_non_rmw_general_operand" "r,m,W"))]
  15260. + ""
  15261. + {
  15262. + switch( which_alternative ){
  15263. + case 0:
  15264. + return "mov\tpc, %0";
  15265. + case 1:
  15266. + if ( avr32_const_pool_ref_operand(operands[0], GET_MODE(operands[0])) )
  15267. + return "lddpc\tpc, %0";
  15268. + else
  15269. + return "ld.w\tpc, %0";
  15270. + case 2:
  15271. + if ( flag_pic )
  15272. + return "ld.w\tpc, r6[%0@got]";
  15273. + else
  15274. + return "lda.w\tpc, %0";
  15275. + default:
  15276. + abort();
  15277. + }
  15278. + }
  15279. + [(set_attr "length" "2,4,8")
  15280. + (set_attr "type" "call,call,call")
  15281. + (set_attr "cc" "none,none,clobber")])
  15282. +
  15283. +
  15284. +
  15285. +;;=============================================================================
  15286. +;; casesi and tablejump
  15287. +;;=============================================================================
  15288. +(define_insn "tablejump_add"
  15289. + [(set (pc)
  15290. + (plus:SI (match_operand:SI 0 "register_operand" "r")
  15291. + (mult:SI (match_operand:SI 1 "register_operand" "r")
  15292. + (match_operand:SI 2 "immediate_operand" "Ku04" ))))
  15293. + (use (label_ref (match_operand 3 "" "")))]
  15294. + "flag_pic &&
  15295. + ((INTVAL(operands[2]) == 0) || (INTVAL(operands[2]) == 2) ||
  15296. + (INTVAL(operands[2]) == 4) || (INTVAL(operands[2]) == 8))"
  15297. + "add\tpc, %0, %1 << %p2"
  15298. + [(set_attr "length" "4")
  15299. + (set_attr "cc" "clobber")])
  15300. +
  15301. +(define_insn "tablejump_insn"
  15302. + [(set (pc) (match_operand:SI 0 "memory_operand" "m"))
  15303. + (use (label_ref (match_operand 1 "" "")))]
  15304. + "!flag_pic"
  15305. + "ld.w\tpc, %0"
  15306. + [(set_attr "length" "4")
  15307. + (set_attr "type" "call")
  15308. + (set_attr "cc" "none")])
  15309. +
  15310. +(define_expand "casesi"
  15311. + [(match_operand:SI 0 "register_operand" "") ; index to jump on
  15312. + (match_operand:SI 1 "const_int_operand" "") ; lower bound
  15313. + (match_operand:SI 2 "const_int_operand" "") ; total range
  15314. + (match_operand:SI 3 "" "") ; table label
  15315. + (match_operand:SI 4 "" "")] ; Out of range label
  15316. + ""
  15317. + "
  15318. + {
  15319. + rtx reg;
  15320. + rtx index = operands[0];
  15321. + rtx low_bound = operands[1];
  15322. + rtx range = operands[2];
  15323. + rtx table_label = operands[3];
  15324. + rtx oor_label = operands[4];
  15325. +
  15326. + index = force_reg ( SImode, index );
  15327. + if (low_bound != const0_rtx)
  15328. + {
  15329. + if (!avr32_const_ok_for_constraint_p(INTVAL (low_bound), 'I', \"Is21\")){
  15330. + reg = force_reg(SImode, GEN_INT (INTVAL (low_bound)));
  15331. + emit_insn (gen_subsi3 (reg, index,
  15332. + reg));
  15333. + } else {
  15334. + reg = gen_reg_rtx (SImode);
  15335. + emit_insn (gen_addsi3 (reg, index,
  15336. + GEN_INT (-INTVAL (low_bound))));
  15337. + }
  15338. + index = reg;
  15339. + }
  15340. +
  15341. + if (!avr32_const_ok_for_constraint_p (INTVAL (range), 'K', \"Ks21\"))
  15342. + range = force_reg (SImode, range);
  15343. +
  15344. + emit_cmp_and_jump_insns ( index, range, GTU, NULL_RTX, SImode, 1, oor_label );
  15345. + reg = gen_reg_rtx (SImode);
  15346. + emit_move_insn ( reg, gen_rtx_LABEL_REF (VOIDmode, table_label));
  15347. +
  15348. + if ( flag_pic )
  15349. + emit_jump_insn ( gen_tablejump_add ( reg, index, GEN_INT(4), table_label));
  15350. + else
  15351. + emit_jump_insn (
  15352. + gen_tablejump_insn ( gen_rtx_MEM ( SImode,
  15353. + gen_rtx_PLUS ( SImode,
  15354. + reg,
  15355. + gen_rtx_MULT ( SImode,
  15356. + index,
  15357. + GEN_INT(4)))),
  15358. + table_label));
  15359. + DONE;
  15360. + }"
  15361. +)
  15362. +
  15363. +
  15364. +
  15365. +(define_insn "prefetch"
  15366. + [(prefetch (match_operand:SI 0 "avr32_ks16_address_operand" "p")
  15367. + (match_operand 1 "const_int_operand" "")
  15368. + (match_operand 2 "const_int_operand" ""))]
  15369. + ""
  15370. + {
  15371. + return "pref\t%0";
  15372. + }
  15373. +
  15374. + [(set_attr "length" "4")
  15375. + (set_attr "type" "load")
  15376. + (set_attr "cc" "none")])
  15377. +
  15378. +
  15379. +
  15380. +;;=============================================================================
  15381. +;; prologue
  15382. +;;-----------------------------------------------------------------------------
  15383. +;; This pattern, if defined, emits RTL for entry to a function. The function
  15384. +;; entry is responsible for setting up the stack frame, initializing the frame
  15385. +;; pointer register, saving callee saved registers, etc.
  15386. +;;=============================================================================
  15387. +(define_expand "prologue"
  15388. + [(clobber (const_int 0))]
  15389. + ""
  15390. + "
  15391. + avr32_expand_prologue();
  15392. + DONE;
  15393. + "
  15394. + )
  15395. +
  15396. +;;=============================================================================
  15397. +;; eh_return
  15398. +;;-----------------------------------------------------------------------------
  15399. +;; This pattern, if defined, affects the way __builtin_eh_return, and
  15400. +;; thence the call frame exception handling library routines, are
  15401. +;; built. It is intended to handle non-trivial actions needed along
  15402. +;; the abnormal return path.
  15403. +;;
  15404. +;; The address of the exception handler to which the function should
  15405. +;; return is passed as operand to this pattern. It will normally need
  15406. +;; to copied by the pattern to some special register or memory
  15407. +;; location. If the pattern needs to determine the location of the
  15408. +;; target call frame in order to do so, it may use
  15409. +;; EH_RETURN_STACKADJ_RTX, if defined; it will have already been
  15410. +;; assigned.
  15411. +;;
  15412. +;; If this pattern is not defined, the default action will be to
  15413. +;; simply copy the return address to EH_RETURN_HANDLER_RTX. Either
  15414. +;; that macro or this pattern needs to be defined if call frame
  15415. +;; exception handling is to be used.
  15416. +
  15417. +;; We can't expand this before we know where the link register is stored.
  15418. +(define_insn_and_split "eh_return"
  15419. + [(unspec_volatile [(match_operand:SI 0 "register_operand" "r")]
  15420. + VUNSPEC_EH_RETURN)
  15421. + (clobber (match_scratch:SI 1 "=&r"))]
  15422. + ""
  15423. + "#"
  15424. + "reload_completed"
  15425. + [(const_int 0)]
  15426. + "
  15427. + {
  15428. + avr32_set_return_address (operands[0], operands[1]);
  15429. + DONE;
  15430. + }"
  15431. + )
  15432. +
  15433. +
  15434. +;;=============================================================================
  15435. +;; ffssi2
  15436. +;;-----------------------------------------------------------------------------
  15437. +(define_insn "ffssi2"
  15438. + [ (set (match_operand:SI 0 "register_operand" "=r")
  15439. + (ffs:SI (match_operand:SI 1 "register_operand" "r"))) ]
  15440. + ""
  15441. + "mov %0, %1
  15442. + brev %0
  15443. + clz %0, %0
  15444. + sub %0, -1
  15445. + cp %0, 33
  15446. + moveq %0, 0"
  15447. + [(set_attr "length" "18")
  15448. + (set_attr "cc" "clobber")]
  15449. + )
  15450. +
  15451. +
  15452. +
  15453. +;;=============================================================================
  15454. +;; swap_h
  15455. +;;-----------------------------------------------------------------------------
  15456. +(define_insn "*swap_h"
  15457. + [ (set (match_operand:SI 0 "register_operand" "=r")
  15458. + (ior:SI (ashift:SI (match_dup 0) (const_int 16))
  15459. + (lshiftrt:SI (match_dup 0) (const_int 16))))]
  15460. + ""
  15461. + "swap.h %0"
  15462. + [(set_attr "length" "2")]
  15463. + )
  15464. +
  15465. +(define_insn_and_split "bswap_16"
  15466. + [ (set (match_operand:HI 0 "avr32_bswap_operand" "=r,RKs13,r")
  15467. + (ior:HI (and:HI (lshiftrt:HI (match_operand:HI 1 "avr32_bswap_operand" "r,r,RKs13")
  15468. + (const_int 8))
  15469. + (const_int 255))
  15470. + (ashift:HI (and:HI (match_dup 1)
  15471. + (const_int 255))
  15472. + (const_int 8))))]
  15473. + ""
  15474. + {
  15475. + switch ( which_alternative ){
  15476. + case 0:
  15477. + if ( REGNO(operands[0]) == REGNO(operands[1]))
  15478. + return "swap.bh\t%0";
  15479. + else
  15480. + return "mov\t%0, %1\;swap.bh\t%0";
  15481. + case 1:
  15482. + return "stswp.h\t%0, %1";
  15483. + case 2:
  15484. + return "ldswp.sh\t%0, %1";
  15485. + default:
  15486. + abort();
  15487. + }
  15488. + }
  15489. +
  15490. + "(reload_completed &&
  15491. + REG_P(operands[0]) && REG_P(operands[1])
  15492. + && (REGNO(operands[0]) != REGNO(operands[1])))"
  15493. + [(set (match_dup 0) (match_dup 1))
  15494. + (set (match_dup 0)
  15495. + (ior:HI (and:HI (lshiftrt:HI (match_dup 0)
  15496. + (const_int 8))
  15497. + (const_int 255))
  15498. + (ashift:HI (and:HI (match_dup 0)
  15499. + (const_int 255))
  15500. + (const_int 8))))]
  15501. + ""
  15502. +
  15503. + [(set_attr "length" "4,4,4")
  15504. + (set_attr "type" "alu,store,load_rm")]
  15505. + )
  15506. +
  15507. +(define_insn_and_split "bswap_32"
  15508. + [ (set (match_operand:SI 0 "avr32_bswap_operand" "=r,RKs14,r")
  15509. + (ior:SI (ior:SI (lshiftrt:SI (and:SI (match_operand:SI 1 "avr32_bswap_operand" "r,r,RKs14")
  15510. + (const_int -16777216))
  15511. + (const_int 24))
  15512. + (lshiftrt:SI (and:SI (match_dup 1)
  15513. + (const_int 16711680))
  15514. + (const_int 8)))
  15515. + (ior:SI (ashift:SI (and:SI (match_dup 1)
  15516. + (const_int 65280))
  15517. + (const_int 8))
  15518. + (ashift:SI (and:SI (match_dup 1)
  15519. + (const_int 255))
  15520. + (const_int 24)))))]
  15521. + ""
  15522. + {
  15523. + switch ( which_alternative ){
  15524. + case 0:
  15525. + if ( REGNO(operands[0]) == REGNO(operands[1]))
  15526. + return "swap.b\t%0";
  15527. + else
  15528. + return "#";
  15529. + case 1:
  15530. + return "stswp.w\t%0, %1";
  15531. + case 2:
  15532. + return "ldswp.w\t%0, %1";
  15533. + default:
  15534. + abort();
  15535. + }
  15536. + }
  15537. + "(reload_completed &&
  15538. + REG_P(operands[0]) && REG_P(operands[1])
  15539. + && (REGNO(operands[0]) != REGNO(operands[1])))"
  15540. + [(set (match_dup 0) (match_dup 1))
  15541. + (set (match_dup 0)
  15542. + (ior:SI (ior:SI (lshiftrt:SI (and:SI (match_dup 0)
  15543. + (const_int -16777216))
  15544. + (const_int 24))
  15545. + (lshiftrt:SI (and:SI (match_dup 0)
  15546. + (const_int 16711680))
  15547. + (const_int 8)))
  15548. + (ior:SI (ashift:SI (and:SI (match_dup 0)
  15549. + (const_int 65280))
  15550. + (const_int 8))
  15551. + (ashift:SI (and:SI (match_dup 0)
  15552. + (const_int 255))
  15553. + (const_int 24)))))]
  15554. + ""
  15555. +
  15556. + [(set_attr "length" "4,4,4")
  15557. + (set_attr "type" "alu,store,load_rm")]
  15558. + )
  15559. +
  15560. +
  15561. +;;=============================================================================
  15562. +;; blockage
  15563. +;;-----------------------------------------------------------------------------
  15564. +;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
  15565. +;; all of memory. This blocks insns from being moved across this point.
  15566. +
  15567. +(define_insn "blockage"
  15568. + [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
  15569. + ""
  15570. + ""
  15571. + [(set_attr "length" "0")]
  15572. +)
  15573. +
  15574. +;;=============================================================================
  15575. +;; clzsi2
  15576. +;;-----------------------------------------------------------------------------
  15577. +(define_insn "clzsi2"
  15578. + [ (set (match_operand:SI 0 "register_operand" "=r")
  15579. + (clz:SI (match_operand:SI 1 "register_operand" "r"))) ]
  15580. + ""
  15581. + "clz %0, %1"
  15582. + [(set_attr "length" "4")
  15583. + (set_attr "cc" "set_z")]
  15584. + )
  15585. +
  15586. +;;=============================================================================
  15587. +;; ctzsi2
  15588. +;;-----------------------------------------------------------------------------
  15589. +(define_insn "ctzsi2"
  15590. + [ (set (match_operand:SI 0 "register_operand" "=r,r")
  15591. + (ctz:SI (match_operand:SI 1 "register_operand" "0,r"))) ]
  15592. + ""
  15593. + "@
  15594. + brev\t%0\;clz\t%0, %0
  15595. + mov\t%0, %1\;brev\t%0\;clz\t%0, %0"
  15596. + [(set_attr "length" "8")
  15597. + (set_attr "cc" "set_z")]
  15598. + )
  15599. +
  15600. +;;=============================================================================
  15601. +;; cache instructions
  15602. +;;-----------------------------------------------------------------------------
  15603. +(define_insn "cache"
  15604. + [ (unspec_volatile [(match_operand:SI 0 "avr32_ks11_address_operand" "p")
  15605. + (match_operand:SI 1 "immediate_operand" "Ku05")] VUNSPEC_CACHE)]
  15606. + ""
  15607. + "cache %0, %1"
  15608. + [(set_attr "length" "4")]
  15609. + )
  15610. +
  15611. +(define_insn "sync"
  15612. + [ (unspec_volatile [(match_operand:SI 0 "immediate_operand" "Ku08")] VUNSPEC_SYNC)]
  15613. + ""
  15614. + "sync %0"
  15615. + [(set_attr "length" "4")]
  15616. + )
  15617. +
  15618. +;;=============================================================================
  15619. +;; TLB instructions
  15620. +;;-----------------------------------------------------------------------------
  15621. +(define_insn "tlbr"
  15622. + [ (unspec_volatile [(const_int 0)] VUNSPEC_TLBR)]
  15623. + ""
  15624. + "tlbr"
  15625. + [(set_attr "length" "2")]
  15626. + )
  15627. +
  15628. +(define_insn "tlbw"
  15629. + [ (unspec_volatile [(const_int 0)] VUNSPEC_TLBW)]
  15630. + ""
  15631. + "tlbw"
  15632. + [(set_attr "length" "2")]
  15633. + )
  15634. +
  15635. +(define_insn "tlbs"
  15636. + [ (unspec_volatile [(const_int 0)] VUNSPEC_TLBS)]
  15637. + ""
  15638. + "tlbs"
  15639. + [(set_attr "length" "2")]
  15640. + )
  15641. +
  15642. +;;=============================================================================
  15643. +;; Breakpoint instruction
  15644. +;;-----------------------------------------------------------------------------
  15645. +(define_insn "breakpoint"
  15646. + [ (unspec_volatile [(const_int 0)] VUNSPEC_BREAKPOINT)]
  15647. + ""
  15648. + "breakpoint"
  15649. + [(set_attr "length" "2")]
  15650. + )
  15651. +
  15652. +
  15653. +;;=============================================================================
  15654. +;; mtsr/mfsr instruction
  15655. +;;-----------------------------------------------------------------------------
  15656. +(define_insn "mtsr"
  15657. + [ (unspec_volatile [(match_operand 0 "immediate_operand" "i")
  15658. + (match_operand:SI 1 "register_operand" "r")] VUNSPEC_MTSR)]
  15659. + ""
  15660. + "mtsr\t%0, %1"
  15661. + [(set_attr "length" "4")]
  15662. + )
  15663. +
  15664. +(define_insn "mfsr"
  15665. + [ (set (match_operand:SI 0 "register_operand" "=r")
  15666. + (unspec_volatile:SI [(match_operand 1 "immediate_operand" "i")] VUNSPEC_MFSR)) ]
  15667. + ""
  15668. + "mfsr\t%0, %1"
  15669. + [(set_attr "length" "4")]
  15670. + )
  15671. +
  15672. +;;=============================================================================
  15673. +;; mtdr/mfdr instruction
  15674. +;;-----------------------------------------------------------------------------
  15675. +(define_insn "mtdr"
  15676. + [ (unspec_volatile [(match_operand 0 "immediate_operand" "i")
  15677. + (match_operand:SI 1 "register_operand" "r")] VUNSPEC_MTDR)]
  15678. + ""
  15679. + "mtdr\t%0, %1"
  15680. + [(set_attr "length" "4")]
  15681. + )
  15682. +
  15683. +(define_insn "mfdr"
  15684. + [ (set (match_operand:SI 0 "register_operand" "=r")
  15685. + (unspec_volatile:SI [(match_operand 1 "immediate_operand" "i")] VUNSPEC_MFDR)) ]
  15686. + ""
  15687. + "mfdr\t%0, %1"
  15688. + [(set_attr "length" "4")]
  15689. + )
  15690. +
  15691. +;;=============================================================================
  15692. +;; musfr
  15693. +;;-----------------------------------------------------------------------------
  15694. +(define_insn "musfr"
  15695. + [ (unspec_volatile [(match_operand:SI 0 "register_operand" "r")] VUNSPEC_MUSFR)]
  15696. + ""
  15697. + "musfr\t%0"
  15698. + [(set_attr "length" "2")
  15699. + (set_attr "cc" "clobber")]
  15700. + )
  15701. +
  15702. +(define_insn "mustr"
  15703. + [ (set (match_operand:SI 0 "register_operand" "=r")
  15704. + (unspec_volatile:SI [(const_int 0)] VUNSPEC_MUSTR)) ]
  15705. + ""
  15706. + "mustr\t%0"
  15707. + [(set_attr "length" "2")]
  15708. + )
  15709. +
  15710. +(define_insn "ssrf"
  15711. + [ (unspec_volatile [(match_operand:SI 0 "immediate_operand" "Ku05")] VUNSPEC_SSRF)]
  15712. + ""
  15713. + "ssrf %0"
  15714. + [(set_attr "length" "2")
  15715. + (set_attr "cc" "clobber")]
  15716. + )
  15717. +
  15718. +(define_insn "csrf"
  15719. + [ (unspec_volatile [(match_operand:SI 0 "immediate_operand" "Ku05")] VUNSPEC_CSRF)]
  15720. + ""
  15721. + "csrf %0"
  15722. + [(set_attr "length" "2")
  15723. + (set_attr "cc" "clobber")]
  15724. + )
  15725. +
  15726. +;;=============================================================================
  15727. +;; Flush Return Stack instruction
  15728. +;;-----------------------------------------------------------------------------
  15729. +(define_insn "frs"
  15730. + [ (unspec_volatile [(const_int 0)] VUNSPEC_FRS)]
  15731. + ""
  15732. + "frs"
  15733. + [(set_attr "length" "2")
  15734. + (set_attr "cc" "none")]
  15735. + )
  15736. +
  15737. +
  15738. +;;=============================================================================
  15739. +;; Saturation Round Scale instruction
  15740. +;;-----------------------------------------------------------------------------
  15741. +(define_insn "sats"
  15742. + [ (set (match_operand:SI 0 "register_operand" "+r")
  15743. + (unspec:SI [(match_dup 0)
  15744. + (match_operand 1 "immediate_operand" "Ku05")
  15745. + (match_operand 2 "immediate_operand" "Ku05")]
  15746. + UNSPEC_SATS)) ]
  15747. + "TARGET_DSP"
  15748. + "sats\t%0 >> %1, %2"
  15749. + [(set_attr "type" "alu_sat")
  15750. + (set_attr "length" "4")]
  15751. + )
  15752. +
  15753. +(define_insn "satu"
  15754. + [ (set (match_operand:SI 0 "register_operand" "+r")
  15755. + (unspec:SI [(match_dup 0)
  15756. + (match_operand 1 "immediate_operand" "Ku05")
  15757. + (match_operand 2 "immediate_operand" "Ku05")]
  15758. + UNSPEC_SATU)) ]
  15759. + "TARGET_DSP"
  15760. + "satu\t%0 >> %1, %2"
  15761. + [(set_attr "type" "alu_sat")
  15762. + (set_attr "length" "4")]
  15763. + )
  15764. +
  15765. +(define_insn "satrnds"
  15766. + [ (set (match_operand:SI 0 "register_operand" "+r")
  15767. + (unspec:SI [(match_dup 0)
  15768. + (match_operand 1 "immediate_operand" "Ku05")
  15769. + (match_operand 2 "immediate_operand" "Ku05")]
  15770. + UNSPEC_SATRNDS)) ]
  15771. + "TARGET_DSP"
  15772. + "satrnds\t%0 >> %1, %2"
  15773. + [(set_attr "type" "alu_sat")
  15774. + (set_attr "length" "4")]
  15775. + )
  15776. +
  15777. +(define_insn "satrndu"
  15778. + [ (set (match_operand:SI 0 "register_operand" "+r")
  15779. + (unspec:SI [(match_dup 0)
  15780. + (match_operand 1 "immediate_operand" "Ku05")
  15781. + (match_operand 2 "immediate_operand" "Ku05")]
  15782. + UNSPEC_SATRNDU)) ]
  15783. + "TARGET_DSP"
  15784. + "satrndu\t%0 >> %1, %2"
  15785. + [(set_attr "type" "alu_sat")
  15786. + (set_attr "length" "4")]
  15787. + )
  15788. +
  15789. +(define_insn "sleep"
  15790. + [(unspec_volatile [(const_int 0)] VUNSPEC_SLEEP)
  15791. + (match_operand:SI 0 "const_int_operand" "")]
  15792. + ""
  15793. + "sleep %0"
  15794. + [(set_attr "length" "1")
  15795. + (set_attr "cc" "none")
  15796. + ])
  15797. +
  15798. +(define_expand "delay_cycles"
  15799. + [(unspec_volatile [(match_operand:SI 0 "const_int_operand" "i")]
  15800. + VUNSPEC_DELAY_CYCLES)]
  15801. + ""
  15802. + "
  15803. + unsigned int cycles = UINTVAL (operands[0]);
  15804. + if (IN_RANGE(cycles,0x10000 ,0xFFFFFFFF))
  15805. + {
  15806. + unsigned int msb = (cycles & 0xFFFF0000);
  15807. + unsigned int shift = 16;
  15808. + msb = (msb >> shift);
  15809. + unsigned int cycles_used = (msb*0x10000);
  15810. + emit_insn (gen_delay_cycles_2 (gen_int_mode (msb, SImode)));
  15811. + cycles -= cycles_used;
  15812. + }
  15813. + if (IN_RANGE(cycles, 4, 0xFFFF))
  15814. + {
  15815. + unsigned int loop_count = (cycles/ 4);
  15816. + unsigned int cycles_used = (loop_count*4);
  15817. + emit_insn (gen_delay_cycles_1 (gen_int_mode (loop_count, SImode)));
  15818. + cycles -= cycles_used;
  15819. + }
  15820. + while (cycles >= 3)
  15821. + {
  15822. + emit_insn (gen_nop3 ());
  15823. + cycles -= 3;
  15824. + }
  15825. + if (cycles == 1 || cycles == 2)
  15826. + {
  15827. + while (cycles--)
  15828. + emit_insn (gen_nop ());
  15829. + }
  15830. + DONE;
  15831. + ")
  15832. +
  15833. +(define_insn "delay_cycles_1"
  15834. +[(unspec_volatile [(const_int 0)] VUNSPEC_DELAY_CYCLES_1)
  15835. + (match_operand:SI 0 "immediate_operand" "")
  15836. + (clobber (match_scratch:SI 1 "=&r"))]
  15837. + ""
  15838. + "mov\t%1, %0
  15839. + 1: sub\t%1, 1
  15840. + brne\t1b
  15841. + nop"
  15842. +)
  15843. +
  15844. +(define_insn "delay_cycles_2"
  15845. +[(unspec_volatile [(const_int 0)] VUNSPEC_DELAY_CYCLES_2)
  15846. + (match_operand:SI 0 "immediate_operand" "")
  15847. + (clobber (match_scratch:SI 1 "=&r"))
  15848. + (clobber (match_scratch:SI 2 "=&r"))]
  15849. + ""
  15850. + "mov\t%1, %0
  15851. + 1: mov\t%2, 16383
  15852. + 2: sub\t%2, 1
  15853. + brne\t2b
  15854. + nop
  15855. + sub\t%1, 1
  15856. + brne\t1b
  15857. + nop"
  15858. +)
  15859. +
  15860. +;; CPU instructions
  15861. +
  15862. +;;=============================================================================
  15863. +;; nop
  15864. +;;-----------------------------------------------------------------------------
  15865. +;; No-op instruction.
  15866. +;;=============================================================================
  15867. +(define_insn "nop"
  15868. + [(unspec_volatile [(const_int 0)] VUNSPEC_NOP)]
  15869. + ""
  15870. + "nop"
  15871. + [(set_attr "length" "1")
  15872. + (set_attr "type" "alu")
  15873. + (set_attr "cc" "none")])
  15874. +
  15875. +;; NOP3
  15876. +(define_insn "nop3"
  15877. + [(unspec_volatile [(const_int 0)] VUNSPEC_NOP3)]
  15878. + ""
  15879. + "rjmp\t2"
  15880. + [(set_attr "length" "3")
  15881. + (set_attr "type" "alu")
  15882. + (set_attr "cc" "none")])
  15883. +
  15884. +;; Special patterns for dealing with the constant pool
  15885. +
  15886. +(define_insn "align_4"
  15887. + [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
  15888. + ""
  15889. + {
  15890. + assemble_align (32);
  15891. + return "";
  15892. + }
  15893. + [(set_attr "length" "2")]
  15894. +)
  15895. +
  15896. +
  15897. +(define_insn "consttable_start"
  15898. + [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_START)]
  15899. + ""
  15900. + {
  15901. + return ".cpool";
  15902. + }
  15903. + [(set_attr "length" "0")]
  15904. + )
  15905. +
  15906. +(define_insn "consttable_end"
  15907. + [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
  15908. + ""
  15909. + {
  15910. + making_const_table = FALSE;
  15911. + return "";
  15912. + }
  15913. + [(set_attr "length" "0")]
  15914. +)
  15915. +
  15916. +
  15917. +(define_insn "consttable_4"
  15918. + [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
  15919. + ""
  15920. + {
  15921. + making_const_table = TRUE;
  15922. + switch (GET_MODE_CLASS (GET_MODE (operands[0])))
  15923. + {
  15924. + case MODE_FLOAT:
  15925. + {
  15926. + REAL_VALUE_TYPE r;
  15927. + char real_string[1024];
  15928. + REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
  15929. + real_to_decimal(real_string, &r, 1024, 0, 1);
  15930. + asm_fprintf (asm_out_file, "\t.float\t%s\n", real_string);
  15931. + break;
  15932. + }
  15933. + default:
  15934. + assemble_integer (operands[0], 4, 0, 1);
  15935. + break;
  15936. + }
  15937. + return "";
  15938. + }
  15939. + [(set_attr "length" "4")]
  15940. +)
  15941. +
  15942. +(define_insn "consttable_8"
  15943. + [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
  15944. + ""
  15945. + {
  15946. + making_const_table = TRUE;
  15947. + switch (GET_MODE_CLASS (GET_MODE (operands[0])))
  15948. + {
  15949. + case MODE_FLOAT:
  15950. + {
  15951. + REAL_VALUE_TYPE r;
  15952. + char real_string[1024];
  15953. + REAL_VALUE_FROM_CONST_DOUBLE (r, operands[0]);
  15954. + real_to_decimal(real_string, &r, 1024, 0, 1);
  15955. + asm_fprintf (asm_out_file, "\t.double\t%s\n", real_string);
  15956. + break;
  15957. + }
  15958. + default:
  15959. + assemble_integer(operands[0], 8, 0, 1);
  15960. + break;
  15961. + }
  15962. + return "";
  15963. + }
  15964. + [(set_attr "length" "8")]
  15965. +)
  15966. +
  15967. +(define_insn "consttable_16"
  15968. + [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
  15969. + ""
  15970. + {
  15971. + making_const_table = TRUE;
  15972. + assemble_integer(operands[0], 16, 0, 1);
  15973. + return "";
  15974. + }
  15975. + [(set_attr "length" "16")]
  15976. +)
  15977. +
  15978. +;;=============================================================================
  15979. +;; coprocessor instructions
  15980. +;;-----------------------------------------------------------------------------
  15981. +(define_insn "cop"
  15982. + [ (unspec_volatile [(match_operand 0 "immediate_operand" "Ku03")
  15983. + (match_operand 1 "immediate_operand" "Ku04")
  15984. + (match_operand 2 "immediate_operand" "Ku04")
  15985. + (match_operand 3 "immediate_operand" "Ku04")
  15986. + (match_operand 4 "immediate_operand" "Ku07")] VUNSPEC_COP)]
  15987. + ""
  15988. + "cop\tcp%0, cr%1, cr%2, cr%3, %4"
  15989. + [(set_attr "length" "4")]
  15990. + )
  15991. +
  15992. +(define_insn "mvcrsi"
  15993. + [ (set (match_operand:SI 0 "avr32_cop_move_operand" "=r,<,Z")
  15994. + (unspec_volatile:SI [(match_operand 1 "immediate_operand" "Ku03,Ku03,Ku03")
  15995. + (match_operand 2 "immediate_operand" "Ku04,Ku04,Ku04")]
  15996. + VUNSPEC_MVCR)) ]
  15997. + ""
  15998. + "@
  15999. + mvcr.w\tcp%1, %0, cr%2
  16000. + stcm.w\tcp%1, %0, cr%2
  16001. + stc.w\tcp%1, %0, cr%2"
  16002. + [(set_attr "length" "4")]
  16003. + )
  16004. +
  16005. +(define_insn "mvcrdi"
  16006. + [ (set (match_operand:DI 0 "avr32_cop_move_operand" "=r,<,Z")
  16007. + (unspec_volatile:DI [(match_operand 1 "immediate_operand" "Ku03,Ku03,Ku03")
  16008. + (match_operand 2 "immediate_operand" "Ku04,Ku04,Ku04")]
  16009. + VUNSPEC_MVCR)) ]
  16010. + ""
  16011. + "@
  16012. + mvcr.d\tcp%1, %0, cr%2
  16013. + stcm.d\tcp%1, %0, cr%2-cr%i2
  16014. + stc.d\tcp%1, %0, cr%2"
  16015. + [(set_attr "length" "4")]
  16016. + )
  16017. +
  16018. +(define_insn "mvrcsi"
  16019. + [ (unspec_volatile:SI [(match_operand 0 "immediate_operand" "Ku03,Ku03,Ku03")
  16020. + (match_operand 1 "immediate_operand" "Ku04,Ku04,Ku04")
  16021. + (match_operand:SI 2 "avr32_cop_move_operand" "r,>,Z")]
  16022. + VUNSPEC_MVRC)]
  16023. + ""
  16024. + {
  16025. + switch (which_alternative){
  16026. + case 0:
  16027. + return "mvrc.w\tcp%0, cr%1, %2";
  16028. + case 1:
  16029. + return "ldcm.w\tcp%0, %2, cr%1";
  16030. + case 2:
  16031. + return "ldc.w\tcp%0, cr%1, %2";
  16032. + default:
  16033. + abort();
  16034. + }
  16035. + }
  16036. + [(set_attr "length" "4")]
  16037. + )
  16038. +
  16039. +(define_insn "mvrcdi"
  16040. + [ (unspec_volatile:DI [(match_operand 0 "immediate_operand" "Ku03,Ku03,Ku03")
  16041. + (match_operand 1 "immediate_operand" "Ku04,Ku04,Ku04")
  16042. + (match_operand:DI 2 "avr32_cop_move_operand" "r,>,Z")]
  16043. + VUNSPEC_MVRC)]
  16044. + ""
  16045. + {
  16046. + switch (which_alternative){
  16047. + case 0:
  16048. + return "mvrc.d\tcp%0, cr%1, %2";
  16049. + case 1:
  16050. + return "ldcm.d\tcp%0, %2, cr%1-cr%i1";
  16051. + case 2:
  16052. + return "ldc.d\tcp%0, cr%1, %2";
  16053. + default:
  16054. + abort();
  16055. + }
  16056. + }
  16057. + [(set_attr "length" "4")]
  16058. + )
  16059. +
  16060. +;;=============================================================================
  16061. +;; epilogue
  16062. +;;-----------------------------------------------------------------------------
  16063. +;; This pattern emits RTL for exit from a function. The function exit is
  16064. +;; responsible for deallocating the stack frame, restoring callee saved
  16065. +;; registers and emitting the return instruction.
  16066. +;; ToDo: using TARGET_ASM_FUNCTION_PROLOGUE instead.
  16067. +;;=============================================================================
  16068. +(define_expand "epilogue"
  16069. + [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
  16070. + ""
  16071. + "
  16072. + if (USE_RETURN_INSN (FALSE)){
  16073. + emit_jump_insn (gen_return ());
  16074. + DONE;
  16075. + }
  16076. + emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
  16077. + gen_rtvec (1,
  16078. + gen_rtx_RETURN (VOIDmode)),
  16079. + VUNSPEC_EPILOGUE));
  16080. + DONE;
  16081. + "
  16082. + )
  16083. +
  16084. +(define_insn "*epilogue_insns"
  16085. + [(unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
  16086. + ""
  16087. + {
  16088. + avr32_output_return_instruction (FALSE, FALSE, NULL, NULL);
  16089. + return "";
  16090. + }
  16091. + ; Length is absolute worst case
  16092. + [(set_attr "type" "branch")
  16093. + (set_attr "length" "12")]
  16094. + )
  16095. +
  16096. +(define_insn "*epilogue_insns_ret_imm"
  16097. + [(parallel [(set (reg RETVAL_REGNUM) (match_operand 0 "immediate_operand" "i"))
  16098. + (use (reg RETVAL_REGNUM))
  16099. + (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
  16100. + "((INTVAL(operands[0]) == -1) || (INTVAL(operands[0]) == 0) || (INTVAL(operands[0]) == 1))"
  16101. + {
  16102. + avr32_output_return_instruction (FALSE, FALSE, NULL, operands[0]);
  16103. + return "";
  16104. + }
  16105. + ; Length is absolute worst case
  16106. + [(set_attr "type" "branch")
  16107. + (set_attr "length" "12")]
  16108. + )
  16109. +
  16110. +(define_insn "sibcall_epilogue"
  16111. + [(unspec_volatile [(const_int 0)] VUNSPEC_EPILOGUE)]
  16112. + ""
  16113. + {
  16114. + avr32_output_return_instruction (FALSE, FALSE, NULL, NULL);
  16115. + return "";
  16116. + }
  16117. +;; Length is absolute worst case
  16118. + [(set_attr "type" "branch")
  16119. + (set_attr "length" "12")]
  16120. + )
  16121. +
  16122. +(define_insn "*sibcall_epilogue_insns_ret_imm"
  16123. + [(parallel [(set (reg RETVAL_REGNUM) (match_operand 0 "immediate_operand" "i"))
  16124. + (use (reg RETVAL_REGNUM))
  16125. + (unspec_volatile [(const_int 0)] VUNSPEC_EPILOGUE)])]
  16126. + "((INTVAL(operands[0]) == -1) || (INTVAL(operands[0]) == 0) || (INTVAL(operands[0]) == 1))"
  16127. + {
  16128. + avr32_output_return_instruction (FALSE, FALSE, NULL, operands[0]);
  16129. + return "";
  16130. + }
  16131. + ; Length is absolute worst case
  16132. + [(set_attr "type" "branch")
  16133. + (set_attr "length" "12")]
  16134. + )
  16135. +
  16136. +(define_insn "ldxi"
  16137. + [(set (match_operand:SI 0 "register_operand" "=r")
  16138. + (mem:SI (plus:SI
  16139. + (match_operand:SI 1 "register_operand" "r")
  16140. + (mult:SI (zero_extract:SI (match_operand:SI 2 "register_operand" "r")
  16141. + (const_int 8)
  16142. + (match_operand:SI 3 "immediate_operand" "Ku05"))
  16143. + (const_int 4)))))]
  16144. + "(INTVAL(operands[3]) == 24 || INTVAL(operands[3]) == 16 || INTVAL(operands[3]) == 8
  16145. + || INTVAL(operands[3]) == 0)"
  16146. + {
  16147. + switch ( INTVAL(operands[3]) ){
  16148. + case 0:
  16149. + return "ld.w %0, %1[%2:b << 2]";
  16150. + case 8:
  16151. + return "ld.w %0, %1[%2:l << 2]";
  16152. + case 16:
  16153. + return "ld.w %0, %1[%2:u << 2]";
  16154. + case 24:
  16155. + return "ld.w %0, %1[%2:t << 2]";
  16156. + default:
  16157. + internal_error("illegal operand for ldxi");
  16158. + }
  16159. + }
  16160. + [(set_attr "type" "load")
  16161. + (set_attr "length" "4")
  16162. + (set_attr "cc" "none")])
  16163. +
  16164. +
  16165. +
  16166. +
  16167. +
  16168. +
  16169. +;;=============================================================================
  16170. +;; Peephole optimizing
  16171. +;;-----------------------------------------------------------------------------
  16172. +;; Changing
  16173. +;; sub r8, r7, 8
  16174. +;; st.w r8[0x0], r12
  16175. +;; to
  16176. +;; sub r8, r7, 8
  16177. +;; st.w r7[-0x8], r12
  16178. +;;=============================================================================
  16179. +; (set (reg:SI 9 r8)
  16180. +; (plus:SI (reg/f:SI 6 r7)
  16181. +; (const_int ...)))
  16182. +; (set (mem:SI (reg:SI 9 r8))
  16183. +; (reg:SI 12 r12))
  16184. +(define_peephole2
  16185. + [(set (match_operand:SI 0 "register_operand" "")
  16186. + (plus:SI (match_operand:SI 1 "register_operand" "")
  16187. + (match_operand:SI 2 "immediate_operand" "")))
  16188. + (set (mem:SI (match_dup 0))
  16189. + (match_operand:SI 3 "register_operand" ""))]
  16190. + "REGNO(operands[0]) != REGNO(operands[1]) && avr32_const_ok_for_constraint_p(INTVAL(operands[2]), 'K', \"Ks16\")"
  16191. + [(set (match_dup 0)
  16192. + (plus:SI (match_dup 1)
  16193. + (match_dup 2)))
  16194. + (set (mem:SI (plus:SI (match_dup 1)
  16195. + (match_dup 2)))
  16196. + (match_dup 3))]
  16197. + "")
  16198. +
  16199. +;;=============================================================================
  16200. +;; Peephole optimizing
  16201. +;;-----------------------------------------------------------------------------
  16202. +;; Changing
  16203. +;; sub r6, r7, 4
  16204. +;; ld.w r6, r6[0x0]
  16205. +;; to
  16206. +;; sub r6, r7, 4
  16207. +;; ld.w r6, r7[-0x4]
  16208. +;;=============================================================================
  16209. +; (set (reg:SI 7 r6)
  16210. +; (plus:SI (reg/f:SI 6 r7)
  16211. +; (const_int -4 [0xfffffffc])))
  16212. +; (set (reg:SI 7 r6)
  16213. +; (mem:SI (reg:SI 7 r6)))
  16214. +(define_peephole2
  16215. + [(set (match_operand:SI 0 "register_operand" "")
  16216. + (plus:SI (match_operand:SI 1 "register_operand" "")
  16217. + (match_operand:SI 2 "immediate_operand" "")))
  16218. + (set (match_operand:SI 3 "register_operand" "")
  16219. + (mem:SI (match_dup 0)))]
  16220. + "REGNO(operands[0]) != REGNO(operands[1]) && avr32_const_ok_for_constraint_p(INTVAL(operands[2]), 'K', \"Ks16\")"
  16221. + [(set (match_dup 0)
  16222. + (plus:SI (match_dup 1)
  16223. + (match_dup 2)))
  16224. + (set (match_dup 3)
  16225. + (mem:SI (plus:SI (match_dup 1)
  16226. + (match_dup 2))))]
  16227. + "")
  16228. +
  16229. +;;=============================================================================
  16230. +;; Peephole optimizing
  16231. +;;-----------------------------------------------------------------------------
  16232. +;; Changing
  16233. +;; ld.sb r0, r7[-0x6]
  16234. +;; casts.b r0
  16235. +;; to
  16236. +;; ld.sb r0, r7[-0x6]
  16237. +;;=============================================================================
  16238. +(define_peephole2
  16239. + [(set (match_operand:QI 0 "register_operand" "")
  16240. + (match_operand:QI 1 "load_sb_memory_operand" ""))
  16241. + (set (match_operand:SI 2 "register_operand" "")
  16242. + (sign_extend:SI (match_dup 0)))]
  16243. + "(REGNO(operands[0]) == REGNO(operands[2]) || peep2_reg_dead_p(2, operands[0]))"
  16244. + [(set (match_dup 2)
  16245. + (sign_extend:SI (match_dup 1)))]
  16246. + "")
  16247. +
  16248. +;;=============================================================================
  16249. +;; Peephole optimizing
  16250. +;;-----------------------------------------------------------------------------
  16251. +;; Changing
  16252. +;; ld.ub r0, r7[-0x6]
  16253. +;; castu.b r0
  16254. +;; to
  16255. +;; ld.ub r0, r7[-0x6]
  16256. +;;=============================================================================
  16257. +(define_peephole2
  16258. + [(set (match_operand:QI 0 "register_operand" "")
  16259. + (match_operand:QI 1 "memory_operand" ""))
  16260. + (set (match_operand:SI 2 "register_operand" "")
  16261. + (zero_extend:SI (match_dup 0)))]
  16262. + "(REGNO(operands[0]) == REGNO(operands[2])) || peep2_reg_dead_p(2, operands[0])"
  16263. + [(set (match_dup 2)
  16264. + (zero_extend:SI (match_dup 1)))]
  16265. + "")
  16266. +
  16267. +;;=============================================================================
  16268. +;; Peephole optimizing
  16269. +;;-----------------------------------------------------------------------------
  16270. +;; Changing
  16271. +;; ld.sh r0, r7[-0x6]
  16272. +;; casts.h r0
  16273. +;; to
  16274. +;; ld.sh r0, r7[-0x6]
  16275. +;;=============================================================================
  16276. +(define_peephole2
  16277. + [(set (match_operand:HI 0 "register_operand" "")
  16278. + (match_operand:HI 1 "memory_operand" ""))
  16279. + (set (match_operand:SI 2 "register_operand" "")
  16280. + (sign_extend:SI (match_dup 0)))]
  16281. + "(REGNO(operands[0]) == REGNO(operands[2])) || peep2_reg_dead_p(2, operands[0])"
  16282. + [(set (match_dup 2)
  16283. + (sign_extend:SI (match_dup 1)))]
  16284. + "")
  16285. +
  16286. +;;=============================================================================
  16287. +;; Peephole optimizing
  16288. +;;-----------------------------------------------------------------------------
  16289. +;; Changing
  16290. +;; ld.uh r0, r7[-0x6]
  16291. +;; castu.h r0
  16292. +;; to
  16293. +;; ld.uh r0, r7[-0x6]
  16294. +;;=============================================================================
  16295. +(define_peephole2
  16296. + [(set (match_operand:HI 0 "register_operand" "")
  16297. + (match_operand:HI 1 "memory_operand" ""))
  16298. + (set (match_operand:SI 2 "register_operand" "")
  16299. + (zero_extend:SI (match_dup 0)))]
  16300. + "(REGNO(operands[0]) == REGNO(operands[2])) || peep2_reg_dead_p(2, operands[0])"
  16301. + [(set (match_dup 2)
  16302. + (zero_extend:SI (match_dup 1)))]
  16303. + "")
  16304. +
  16305. +;;=============================================================================
  16306. +;; Peephole optimizing
  16307. +;;-----------------------------------------------------------------------------
  16308. +;; Changing
  16309. +;; mul rd, rx, ry
  16310. +;; add rd2, rd
  16311. +;; or
  16312. +;; add rd2, rd, rd2
  16313. +;; to
  16314. +;; mac rd2, rx, ry
  16315. +;;=============================================================================
  16316. +(define_peephole2
  16317. + [(set (match_operand:SI 0 "register_operand" "")
  16318. + (mult:SI (match_operand:SI 1 "register_operand" "")
  16319. + (match_operand:SI 2 "register_operand" "")))
  16320. + (set (match_operand:SI 3 "register_operand" "")
  16321. + (plus:SI (match_dup 3)
  16322. + (match_dup 0)))]
  16323. + "peep2_reg_dead_p(2, operands[0])"
  16324. + [(set (match_dup 3)
  16325. + (plus:SI (mult:SI (match_dup 1)
  16326. + (match_dup 2))
  16327. + (match_dup 3)))]
  16328. + "")
  16329. +
  16330. +(define_peephole2
  16331. + [(set (match_operand:SI 0 "register_operand" "")
  16332. + (mult:SI (match_operand:SI 1 "register_operand" "")
  16333. + (match_operand:SI 2 "register_operand" "")))
  16334. + (set (match_operand:SI 3 "register_operand" "")
  16335. + (plus:SI (match_dup 0)
  16336. + (match_dup 3)))]
  16337. + "peep2_reg_dead_p(2, operands[0])"
  16338. + [(set (match_dup 3)
  16339. + (plus:SI (mult:SI (match_dup 1)
  16340. + (match_dup 2))
  16341. + (match_dup 3)))]
  16342. + "")
  16343. +
  16344. +
  16345. +;;=============================================================================
  16346. +;; Peephole optimizing
  16347. +;;-----------------------------------------------------------------------------
  16348. +;; Changing
  16349. +;; bfextu rd, rs, k5, 1 or and(h/l) rd, one_bit_set_mask
  16350. +;; to
  16351. +;; bld rs, k5
  16352. +;;
  16353. +;; If rd is dead after the operation.
  16354. +;;=============================================================================
  16355. +(define_peephole2
  16356. + [ (set (match_operand:SI 0 "register_operand" "")
  16357. + (zero_extract:SI (match_operand:SI 1 "register_operand" "")
  16358. + (const_int 1)
  16359. + (match_operand:SI 2 "immediate_operand" "")))
  16360. + (set (cc0)
  16361. + (match_dup 0))]
  16362. + "peep2_reg_dead_p(2, operands[0])"
  16363. + [(set (cc0)
  16364. + (and:SI (match_dup 1)
  16365. + (match_dup 2)))]
  16366. + "operands[2] = GEN_INT(1 << INTVAL(operands[2]));")
  16367. +
  16368. +(define_peephole2
  16369. + [ (set (match_operand:SI 0 "register_operand" "")
  16370. + (and:SI (match_operand:SI 1 "register_operand" "")
  16371. + (match_operand:SI 2 "one_bit_set_operand" "")))
  16372. + (set (cc0)
  16373. + (match_dup 0))]
  16374. + "peep2_reg_dead_p(2, operands[0])"
  16375. + [(set (cc0)
  16376. + (and:SI (match_dup 1)
  16377. + (match_dup 2)))]
  16378. + "")
  16379. +
  16380. +;;=============================================================================
  16381. +;; Peephole optimizing
  16382. +;;-----------------------------------------------------------------------------
  16383. +;; Load with extracted index: ld.w Rd, Rb[Ri:{t/u/b/l} << 2]
  16384. +;;
  16385. +;;=============================================================================
  16386. +
  16387. +
  16388. +(define_peephole
  16389. + [(set (match_operand:SI 0 "register_operand" "")
  16390. + (zero_extract:SI (match_operand:SI 1 "register_operand" "")
  16391. + (const_int 8)
  16392. + (match_operand:SI 2 "avr32_extract_shift_operand" "")))
  16393. + (set (match_operand:SI 3 "register_operand" "")
  16394. + (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
  16395. + (match_operand:SI 4 "register_operand" ""))))]
  16396. +
  16397. + "(dead_or_set_p(insn, operands[0]))"
  16398. + {
  16399. + switch ( INTVAL(operands[2]) ){
  16400. + case 0:
  16401. + return "ld.w %3, %4[%1:b << 2]";
  16402. + case 8:
  16403. + return "ld.w %3, %4[%1:l << 2]";
  16404. + case 16:
  16405. + return "ld.w %3, %4[%1:u << 2]";
  16406. + case 24:
  16407. + return "ld.w %3, %4[%1:t << 2]";
  16408. + default:
  16409. + internal_error("illegal operand for ldxi");
  16410. + }
  16411. + }
  16412. + [(set_attr "type" "load")
  16413. + (set_attr "length" "4")
  16414. + (set_attr "cc" "clobber")]
  16415. + )
  16416. +
  16417. +
  16418. +
  16419. +(define_peephole
  16420. + [(set (match_operand:SI 0 "register_operand" "")
  16421. + (and:SI (match_operand:SI 1 "register_operand" "") (const_int 255)))
  16422. + (set (match_operand:SI 2 "register_operand" "")
  16423. + (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
  16424. + (match_operand:SI 3 "register_operand" ""))))]
  16425. +
  16426. + "(dead_or_set_p(insn, operands[0]))"
  16427. +
  16428. + "ld.w %2, %3[%1:b << 2]"
  16429. + [(set_attr "type" "load")
  16430. + (set_attr "length" "4")
  16431. + (set_attr "cc" "clobber")]
  16432. + )
  16433. +
  16434. +
  16435. +(define_peephole2
  16436. + [(set (match_operand:SI 0 "register_operand" "")
  16437. + (zero_extract:SI (match_operand:SI 1 "register_operand" "")
  16438. + (const_int 8)
  16439. + (match_operand:SI 2 "avr32_extract_shift_operand" "")))
  16440. + (set (match_operand:SI 3 "register_operand" "")
  16441. + (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
  16442. + (match_operand:SI 4 "register_operand" ""))))]
  16443. +
  16444. + "(peep2_reg_dead_p(2, operands[0]))
  16445. + || (REGNO(operands[0]) == REGNO(operands[3]))"
  16446. + [(set (match_dup 3)
  16447. + (mem:SI (plus:SI
  16448. + (match_dup 4)
  16449. + (mult:SI (zero_extract:SI (match_dup 1)
  16450. + (const_int 8)
  16451. + (match_dup 2))
  16452. + (const_int 4)))))]
  16453. + )
  16454. +
  16455. +(define_peephole2
  16456. + [(set (match_operand:SI 0 "register_operand" "")
  16457. + (zero_extend:SI (match_operand:QI 1 "register_operand" "")))
  16458. + (set (match_operand:SI 2 "register_operand" "")
  16459. + (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
  16460. + (match_operand:SI 3 "register_operand" ""))))]
  16461. +
  16462. + "(peep2_reg_dead_p(2, operands[0]))
  16463. + || (REGNO(operands[0]) == REGNO(operands[2]))"
  16464. + [(set (match_dup 2)
  16465. + (mem:SI (plus:SI
  16466. + (match_dup 3)
  16467. + (mult:SI (zero_extract:SI (match_dup 1)
  16468. + (const_int 8)
  16469. + (const_int 0))
  16470. + (const_int 4)))))]
  16471. + "operands[1] = gen_rtx_REG(SImode, REGNO(operands[1]));"
  16472. + )
  16473. +
  16474. +
  16475. +(define_peephole2
  16476. + [(set (match_operand:SI 0 "register_operand" "")
  16477. + (and:SI (match_operand:SI 1 "register_operand" "")
  16478. + (const_int 255)))
  16479. + (set (match_operand:SI 2 "register_operand" "")
  16480. + (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
  16481. + (match_operand:SI 3 "register_operand" ""))))]
  16482. +
  16483. + "(peep2_reg_dead_p(2, operands[0]))
  16484. + || (REGNO(operands[0]) == REGNO(operands[2]))"
  16485. + [(set (match_dup 2)
  16486. + (mem:SI (plus:SI
  16487. + (match_dup 3)
  16488. + (mult:SI (zero_extract:SI (match_dup 1)
  16489. + (const_int 8)
  16490. + (const_int 0))
  16491. + (const_int 4)))))]
  16492. + ""
  16493. + )
  16494. +
  16495. +
  16496. +
  16497. +(define_peephole2
  16498. + [(set (match_operand:SI 0 "register_operand" "")
  16499. + (lshiftrt:SI (match_operand:SI 1 "register_operand" "")
  16500. + (const_int 24)))
  16501. + (set (match_operand:SI 2 "register_operand" "")
  16502. + (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
  16503. + (match_operand:SI 3 "register_operand" ""))))]
  16504. +
  16505. + "(peep2_reg_dead_p(2, operands[0]))
  16506. + || (REGNO(operands[0]) == REGNO(operands[2]))"
  16507. + [(set (match_dup 2)
  16508. + (mem:SI (plus:SI
  16509. + (match_dup 3)
  16510. + (mult:SI (zero_extract:SI (match_dup 1)
  16511. + (const_int 8)
  16512. + (const_int 24))
  16513. + (const_int 4)))))]
  16514. + ""
  16515. + )
  16516. +
  16517. +
  16518. +;;************************************************
  16519. +;; ANDN
  16520. +;;
  16521. +;;************************************************
  16522. +
  16523. +
  16524. +(define_peephole2
  16525. + [(set (match_operand:SI 0 "register_operand" "")
  16526. + (not:SI (match_operand:SI 1 "register_operand" "")))
  16527. + (set (match_operand:SI 2 "register_operand" "")
  16528. + (and:SI (match_dup 2)
  16529. + (match_dup 0)))]
  16530. + "peep2_reg_dead_p(2, operands[0])"
  16531. +
  16532. + [(set (match_dup 2)
  16533. + (and:SI (match_dup 2)
  16534. + (not:SI (match_dup 1))
  16535. + ))]
  16536. + ""
  16537. +)
  16538. +
  16539. +(define_peephole2
  16540. + [(set (match_operand:SI 0 "register_operand" "")
  16541. + (not:SI (match_operand:SI 1 "register_operand" "")))
  16542. + (set (match_operand:SI 2 "register_operand" "")
  16543. + (and:SI (match_dup 0)
  16544. + (match_dup 2)
  16545. + ))]
  16546. + "peep2_reg_dead_p(2, operands[0])"
  16547. +
  16548. + [(set (match_dup 2)
  16549. + (and:SI (match_dup 2)
  16550. + (not:SI (match_dup 1))
  16551. + ))]
  16552. +
  16553. + ""
  16554. +)
  16555. +
  16556. +
  16557. +;;=================================================================
  16558. +;; Addabs peephole
  16559. +;;=================================================================
  16560. +
  16561. +(define_peephole
  16562. + [(set (match_operand:SI 2 "register_operand" "=r")
  16563. + (abs:SI (match_operand:SI 1 "register_operand" "r")))
  16564. + (set (match_operand:SI 0 "register_operand" "=r")
  16565. + (plus:SI (match_operand:SI 3 "register_operand" "r")
  16566. + (match_dup 2)))]
  16567. + "dead_or_set_p(insn, operands[2])"
  16568. + "addabs %0, %3, %1"
  16569. + [(set_attr "length" "4")
  16570. + (set_attr "cc" "set_z")])
  16571. +
  16572. +(define_peephole
  16573. + [(set (match_operand:SI 2 "register_operand" "=r")
  16574. + (abs:SI (match_operand:SI 1 "register_operand" "r")))
  16575. + (set (match_operand:SI 0 "register_operand" "=r")
  16576. + (plus:SI (match_dup 2)
  16577. + (match_operand:SI 3 "register_operand" "r")))]
  16578. + "dead_or_set_p(insn, operands[2])"
  16579. + "addabs %0, %3, %1"
  16580. + [(set_attr "length" "4")
  16581. + (set_attr "cc" "set_z")])
  16582. +
  16583. +
  16584. +;;=================================================================
  16585. +;; Detect roundings
  16586. +;;=================================================================
  16587. +
  16588. +(define_insn "*round"
  16589. + [(set (match_operand:SI 0 "register_operand" "+r")
  16590. + (ashiftrt:SI (plus:SI (match_dup 0)
  16591. + (match_operand:SI 1 "immediate_operand" "i"))
  16592. + (match_operand:SI 2 "immediate_operand" "i")))]
  16593. + "avr32_rnd_operands(operands[1], operands[2])"
  16594. +
  16595. + "satrnds %0 >> %2, 31"
  16596. +
  16597. + [(set_attr "type" "alu_sat")
  16598. + (set_attr "length" "4")]
  16599. +
  16600. + )
  16601. +
  16602. +
  16603. +(define_peephole2
  16604. + [(set (match_operand:SI 0 "register_operand" "")
  16605. + (plus:SI (match_dup 0)
  16606. + (match_operand:SI 1 "immediate_operand" "")))
  16607. + (set (match_dup 0)
  16608. + (ashiftrt:SI (match_dup 0)
  16609. + (match_operand:SI 2 "immediate_operand" "")))]
  16610. + "avr32_rnd_operands(operands[1], operands[2])"
  16611. +
  16612. + [(set (match_dup 0)
  16613. + (ashiftrt:SI (plus:SI (match_dup 0)
  16614. + (match_dup 1))
  16615. + (match_dup 2)))]
  16616. + )
  16617. +
  16618. +(define_peephole
  16619. + [(set (match_operand:SI 0 "register_operand" "r")
  16620. + (plus:SI (match_dup 0)
  16621. + (match_operand:SI 1 "immediate_operand" "i")))
  16622. + (set (match_dup 0)
  16623. + (ashiftrt:SI (match_dup 0)
  16624. + (match_operand:SI 2 "immediate_operand" "i")))]
  16625. + "avr32_rnd_operands(operands[1], operands[2])"
  16626. +
  16627. + "satrnds %0 >> %2, 31"
  16628. +
  16629. + [(set_attr "type" "alu_sat")
  16630. + (set_attr "length" "4")
  16631. + (set_attr "cc" "clobber")]
  16632. +
  16633. + )
  16634. +
  16635. +
  16636. +;;=================================================================
  16637. +;; mcall
  16638. +;;=================================================================
  16639. +(define_peephole
  16640. + [(set (match_operand:SI 0 "register_operand" "")
  16641. + (match_operand 1 "avr32_const_pool_ref_operand" ""))
  16642. + (parallel [(call (mem:SI (match_dup 0))
  16643. + (match_operand 2 "" ""))
  16644. + (clobber (reg:SI LR_REGNUM))])]
  16645. + "dead_or_set_p(insn, operands[0])"
  16646. + "mcall %1"
  16647. + [(set_attr "type" "call")
  16648. + (set_attr "length" "4")
  16649. + (set_attr "cc" "clobber")]
  16650. +)
  16651. +
  16652. +(define_peephole
  16653. + [(set (match_operand:SI 2 "register_operand" "")
  16654. + (match_operand 1 "avr32_const_pool_ref_operand" ""))
  16655. + (parallel [(set (match_operand 0 "register_operand" "")
  16656. + (call (mem:SI (match_dup 2))
  16657. + (match_operand 3 "" "")))
  16658. + (clobber (reg:SI LR_REGNUM))])]
  16659. + "dead_or_set_p(insn, operands[2])"
  16660. + "mcall %1"
  16661. + [(set_attr "type" "call")
  16662. + (set_attr "length" "4")
  16663. + (set_attr "cc" "call_set")]
  16664. +)
  16665. +
  16666. +
  16667. +(define_peephole2
  16668. + [(set (match_operand:SI 0 "register_operand" "")
  16669. + (match_operand 1 "avr32_const_pool_ref_operand" ""))
  16670. + (parallel [(call (mem:SI (match_dup 0))
  16671. + (match_operand 2 "" ""))
  16672. + (clobber (reg:SI LR_REGNUM))])]
  16673. + "peep2_reg_dead_p(2, operands[0])"
  16674. + [(parallel [(call (mem:SI (match_dup 1))
  16675. + (match_dup 2))
  16676. + (clobber (reg:SI LR_REGNUM))])]
  16677. + ""
  16678. +)
  16679. +
  16680. +(define_peephole2
  16681. + [(set (match_operand:SI 0 "register_operand" "")
  16682. + (match_operand 1 "avr32_const_pool_ref_operand" ""))
  16683. + (parallel [(set (match_operand 2 "register_operand" "")
  16684. + (call (mem:SI (match_dup 0))
  16685. + (match_operand 3 "" "")))
  16686. + (clobber (reg:SI LR_REGNUM))])]
  16687. + "(peep2_reg_dead_p(2, operands[0]) || (REGNO(operands[2]) == REGNO(operands[0])))"
  16688. + [(parallel [(set (match_dup 2)
  16689. + (call (mem:SI (match_dup 1))
  16690. + (match_dup 3)))
  16691. + (clobber (reg:SI LR_REGNUM))])]
  16692. + ""
  16693. +)
  16694. +
  16695. +;;=================================================================
  16696. +;; Returning a value
  16697. +;;=================================================================
  16698. +
  16699. +
  16700. +(define_peephole
  16701. + [(set (match_operand 0 "register_operand" "")
  16702. + (match_operand 1 "register_operand" ""))
  16703. + (return)]
  16704. + "USE_RETURN_INSN (TRUE) && (REGNO(operands[0]) == RETVAL_REGNUM)
  16705. + && (REGNO(operands[1]) != LR_REGNUM)
  16706. + && (REGNO_REG_CLASS(REGNO(operands[1])) == GENERAL_REGS)"
  16707. + "retal %1"
  16708. + [(set_attr "type" "call")
  16709. + (set_attr "length" "2")]
  16710. + )
  16711. +
  16712. +
  16713. +(define_peephole
  16714. + [(set (match_operand 0 "register_operand" "r")
  16715. + (match_operand 1 "immediate_operand" "i"))
  16716. + (return)]
  16717. + "(USE_RETURN_INSN (FALSE) && (REGNO(operands[0]) == RETVAL_REGNUM) &&
  16718. + ((INTVAL(operands[1]) == -1) || (INTVAL(operands[1]) == 0) || (INTVAL(operands[1]) == 1)))"
  16719. + {
  16720. + avr32_output_return_instruction (TRUE, FALSE, NULL, operands[1]);
  16721. + return "";
  16722. + }
  16723. + [(set_attr "type" "call")
  16724. + (set_attr "length" "4")]
  16725. + )
  16726. +
  16727. +(define_peephole
  16728. + [(set (match_operand 0 "register_operand" "r")
  16729. + (match_operand 1 "immediate_operand" "i"))
  16730. + (unspec_volatile [(return)] VUNSPEC_EPILOGUE)]
  16731. + "(REGNO(operands[0]) == RETVAL_REGNUM) &&
  16732. + ((INTVAL(operands[1]) == -1) || (INTVAL(operands[1]) == 0) || (INTVAL(operands[1]) == 1))"
  16733. + {
  16734. + avr32_output_return_instruction (FALSE, FALSE, NULL, operands[1]);
  16735. + return "";
  16736. + }
  16737. + ; Length is absolute worst case
  16738. + [(set_attr "type" "branch")
  16739. + (set_attr "length" "12")]
  16740. + )
  16741. +
  16742. +(define_peephole
  16743. + [(set (match_operand 0 "register_operand" "=r")
  16744. + (if_then_else (match_operator 1 "avr32_comparison_operator"
  16745. + [(match_operand 4 "register_operand" "r")
  16746. + (match_operand 5 "register_immediate_operand" "rKs21")])
  16747. + (match_operand 2 "avr32_cond_register_immediate_operand" "rKs08")
  16748. + (match_operand 3 "avr32_cond_register_immediate_operand" "rKs08")))
  16749. + (return)]
  16750. + "USE_RETURN_INSN (TRUE) && (REGNO(operands[0]) == RETVAL_REGNUM)"
  16751. + {
  16752. + operands[1] = avr32_output_cmp(operands[1], GET_MODE(operands[4]), operands[4], operands[5]);
  16753. +
  16754. + if ( GET_CODE(operands[2]) == REG
  16755. + && GET_CODE(operands[3]) == REG
  16756. + && REGNO(operands[2]) != LR_REGNUM
  16757. + && REGNO(operands[3]) != LR_REGNUM ){
  16758. + return "ret%1 %2\;ret%i1 %3";
  16759. + } else if ( GET_CODE(operands[2]) == REG
  16760. + && GET_CODE(operands[3]) == CONST_INT ){
  16761. + if ( INTVAL(operands[3]) == -1
  16762. + || INTVAL(operands[3]) == 0
  16763. + || INTVAL(operands[3]) == 1 ){
  16764. + return "ret%1 %2\;ret%i1 %d3";
  16765. + } else {
  16766. + return "mov%1 r12, %2\;mov%i1 r12, %3\;retal r12";
  16767. + }
  16768. + } else if ( GET_CODE(operands[2]) == CONST_INT
  16769. + && GET_CODE(operands[3]) == REG ){
  16770. + if ( INTVAL(operands[2]) == -1
  16771. + || INTVAL(operands[2]) == 0
  16772. + || INTVAL(operands[2]) == 1 ){
  16773. + return "ret%1 %d2\;ret%i1 %3";
  16774. + } else {
  16775. + return "mov%1 r12, %2\;mov%i1 r12, %3\;retal r12";
  16776. + }
  16777. + } else {
  16778. + if ( (INTVAL(operands[2]) == -1
  16779. + || INTVAL(operands[2]) == 0
  16780. + || INTVAL(operands[2]) == 1 )
  16781. + && (INTVAL(operands[3]) == -1
  16782. + || INTVAL(operands[3]) == 0
  16783. + || INTVAL(operands[3]) == 1 )){
  16784. + return "ret%1 %d2\;ret%i1 %d3";
  16785. + } else {
  16786. + return "mov%1 r12, %2\;mov%i1 r12, %3\;retal r12";
  16787. + }
  16788. + }
  16789. + }
  16790. +
  16791. + [(set_attr "length" "10")
  16792. + (set_attr "cc" "none")
  16793. + (set_attr "type" "call")])
  16794. +
  16795. +
  16796. +
  16797. +;;=================================================================
  16798. +;; mulnhh.w
  16799. +;;=================================================================
  16800. +
  16801. +(define_peephole2
  16802. + [(set (match_operand:HI 0 "register_operand" "")
  16803. + (neg:HI (match_operand:HI 1 "register_operand" "")))
  16804. + (set (match_operand:SI 2 "register_operand" "")
  16805. + (mult:SI
  16806. + (sign_extend:SI (match_dup 0))
  16807. + (sign_extend:SI (match_operand:HI 3 "register_operand" ""))))]
  16808. + "(peep2_reg_dead_p(2, operands[0])) || (REGNO(operands[2]) == REGNO(operands[0]))"
  16809. + [ (set (match_dup 2)
  16810. + (mult:SI
  16811. + (sign_extend:SI (neg:HI (match_dup 1)))
  16812. + (sign_extend:SI (match_dup 3))))]
  16813. + ""
  16814. + )
  16815. +
  16816. +(define_peephole2
  16817. + [(set (match_operand:HI 0 "register_operand" "")
  16818. + (neg:HI (match_operand:HI 1 "register_operand" "")))
  16819. + (set (match_operand:SI 2 "register_operand" "")
  16820. + (mult:SI
  16821. + (sign_extend:SI (match_operand:HI 3 "register_operand" ""))
  16822. + (sign_extend:SI (match_dup 0))))]
  16823. + "(peep2_reg_dead_p(2, operands[0])) || (REGNO(operands[2]) == REGNO(operands[0]))"
  16824. + [ (set (match_dup 2)
  16825. + (mult:SI
  16826. + (sign_extend:SI (neg:HI (match_dup 1)))
  16827. + (sign_extend:SI (match_dup 3))))]
  16828. + ""
  16829. + )
  16830. +
  16831. +
  16832. +
  16833. +;;=================================================================
  16834. +;; Vector set and extract operations
  16835. +;;=================================================================
  16836. +(define_insn "vec_setv2hi_hi"
  16837. + [(set (match_operand:V2HI 0 "register_operand" "=r")
  16838. + (vec_merge:V2HI
  16839. + (match_dup 0)
  16840. + (vec_duplicate:V2HI
  16841. + (match_operand:HI 1 "register_operand" "r"))
  16842. + (const_int 1)))]
  16843. + ""
  16844. + "bfins\t%0, %1, 16, 16"
  16845. + [(set_attr "type" "alu")
  16846. + (set_attr "length" "4")
  16847. + (set_attr "cc" "clobber")])
  16848. +
  16849. +(define_insn "vec_setv2hi_lo"
  16850. + [(set (match_operand:V2HI 0 "register_operand" "+r")
  16851. + (vec_merge:V2HI
  16852. + (match_dup 0)
  16853. + (vec_duplicate:V2HI
  16854. + (match_operand:HI 1 "register_operand" "r"))
  16855. + (const_int 2)))]
  16856. + ""
  16857. + "bfins\t%0, %1, 0, 16"
  16858. + [(set_attr "type" "alu")
  16859. + (set_attr "length" "4")
  16860. + (set_attr "cc" "clobber")])
  16861. +
  16862. +(define_expand "vec_setv2hi"
  16863. + [(set (match_operand:V2HI 0 "register_operand" "")
  16864. + (vec_merge:V2HI
  16865. + (match_dup 0)
  16866. + (vec_duplicate:V2HI
  16867. + (match_operand:HI 1 "register_operand" ""))
  16868. + (match_operand 2 "immediate_operand" "")))]
  16869. + ""
  16870. + { operands[2] = GEN_INT(INTVAL(operands[2]) + 1); }
  16871. + )
  16872. +
  16873. +(define_insn "vec_extractv2hi"
  16874. + [(set (match_operand:HI 0 "register_operand" "=r")
  16875. + (vec_select:HI
  16876. + (match_operand:V2HI 1 "register_operand" "r")
  16877. + (parallel [(match_operand:SI 2 "immediate_operand" "i")])))]
  16878. + ""
  16879. + {
  16880. + if ( INTVAL(operands[2]) == 0 )
  16881. + return "bfextu\t%0, %1, 16, 16";
  16882. + else
  16883. + return "bfextu\t%0, %1, 0, 16";
  16884. + }
  16885. + [(set_attr "type" "alu")
  16886. + (set_attr "length" "4")
  16887. + (set_attr "cc" "clobber")])
  16888. +
  16889. +(define_insn "vec_extractv4qi"
  16890. + [(set (match_operand:QI 0 "register_operand" "=r")
  16891. + (vec_select:QI
  16892. + (match_operand:V4QI 1 "register_operand" "r")
  16893. + (parallel [(match_operand:SI 2 "immediate_operand" "i")])))]
  16894. + ""
  16895. + {
  16896. + switch ( INTVAL(operands[2]) ){
  16897. + case 0:
  16898. + return "bfextu\t%0, %1, 24, 8";
  16899. + case 1:
  16900. + return "bfextu\t%0, %1, 16, 8";
  16901. + case 2:
  16902. + return "bfextu\t%0, %1, 8, 8";
  16903. + case 3:
  16904. + return "bfextu\t%0, %1, 0, 8";
  16905. + default:
  16906. + abort();
  16907. + }
  16908. + }
  16909. + [(set_attr "type" "alu")
  16910. + (set_attr "length" "4")
  16911. + (set_attr "cc" "clobber")])
  16912. +
  16913. +
  16914. +(define_insn "concatv2hi"
  16915. + [(set (match_operand:V2HI 0 "register_operand" "=r, r, r")
  16916. + (vec_concat:V2HI
  16917. + (match_operand:HI 1 "register_operand" "r, r, 0")
  16918. + (match_operand:HI 2 "register_operand" "r, 0, r")))]
  16919. + ""
  16920. + "@
  16921. + mov\t%0, %1\;bfins\t%0, %2, 0, 16
  16922. + bfins\t%0, %2, 0, 16
  16923. + bfins\t%0, %1, 16, 16"
  16924. + [(set_attr "length" "6, 4, 4")
  16925. + (set_attr "type" "alu")])
  16926. +
  16927. +
  16928. +;; Load the atomic operation description
  16929. +(include "sync.md")
  16930. +
  16931. +;; Load the SIMD description
  16932. +(include "simd.md")
  16933. +
  16934. +;; Include the FPU for uc3
  16935. +(include "uc3fpu.md")
  16936. diff -Nur gcc-4.4.6.orig/gcc/config/avr32/avr32.opt gcc-4.4.6/gcc/config/avr32/avr32.opt
  16937. --- gcc-4.4.6.orig/gcc/config/avr32/avr32.opt 1970-01-01 01:00:00.000000000 +0100
  16938. +++ gcc-4.4.6/gcc/config/avr32/avr32.opt 2011-10-22 19:23:08.524581303 +0200
  16939. @@ -0,0 +1,93 @@
  16940. +; Options for the ATMEL AVR32 port of the compiler.
  16941. +
  16942. +; Copyright 2007 Atmel Corporation.
  16943. +;
  16944. +; This file is part of GCC.
  16945. +;
  16946. +; GCC is free software; you can redistribute it and/or modify it under
  16947. +; the terms of the GNU General Public License as published by the Free
  16948. +; Software Foundation; either version 2, or (at your option) any later
  16949. +; version.
  16950. +;
  16951. +; GCC is distributed in the hope that it will be useful, but WITHOUT ANY
  16952. +; WARRANTY; without even the implied warranty of MERCHANTABILITY or
  16953. +; FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
  16954. +; for more details.
  16955. +;
  16956. +; You should have received a copy of the GNU General Public License
  16957. +; along with GCC; see the file COPYING. If not, write to the Free
  16958. +; Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
  16959. +; 02110-1301, USA.
  16960. +
  16961. +muse-rodata-section
  16962. +Target Report Mask(USE_RODATA_SECTION)
  16963. +Use section .rodata for read-only data instead of .text.
  16964. +
  16965. +mhard-float
  16966. +Target Report Mask(HARD_FLOAT)
  16967. +Use FPU instructions instead of floating point emulation.
  16968. +
  16969. +msoft-float
  16970. +Target Report InverseMask(HARD_FLOAT, SOFT_FLOAT)
  16971. +Use floating point emulation for floating point operations.
  16972. +
  16973. +mforce-double-align
  16974. +Target Report RejectNegative Mask(FORCE_DOUBLE_ALIGN)
  16975. +Force double-word alignment for double-word memory accesses.
  16976. +
  16977. +mno-init-got
  16978. +Target Report RejectNegative Mask(NO_INIT_GOT)
  16979. +Do not initialize GOT register before using it when compiling PIC code.
  16980. +
  16981. +mrelax
  16982. +Target Report Mask(RELAX)
  16983. +Let invoked assembler and linker do relaxing (Enabled by default when optimization level is >1).
  16984. +
  16985. +mmd-reorg-opt
  16986. +Target Report Undocumented Mask(MD_REORG_OPTIMIZATION)
  16987. +Perform machine dependent optimizations in reorg stage.
  16988. +
  16989. +masm-addr-pseudos
  16990. +Target Report Mask(HAS_ASM_ADDR_PSEUDOS)
  16991. +Use assembler pseudo-instructions lda.w and call for handling direct addresses. (Enabled by default)
  16992. +
  16993. +mpart=
  16994. +Target Report RejectNegative Joined Var(avr32_part_name)
  16995. +Specify the AVR32 part name
  16996. +
  16997. +mcpu=
  16998. +Target Report RejectNegative Joined Undocumented Var(avr32_part_name)
  16999. +Specify the AVR32 part name (deprecated)
  17000. +
  17001. +march=
  17002. +Target Report RejectNegative Joined Var(avr32_arch_name)
  17003. +Specify the AVR32 architecture name
  17004. +
  17005. +mfast-float
  17006. +Target Report Mask(FAST_FLOAT)
  17007. +Enable fast floating-point library. Enabled by default if the -funsafe-math-optimizations switch is specified.
  17008. +
  17009. +mimm-in-const-pool
  17010. +Target Report Var(avr32_imm_in_const_pool) Init(-1)
  17011. +Put large immediates in constant pool. This is enabled by default for archs with insn-cache.
  17012. +
  17013. +mno-pic
  17014. +Target Report RejectNegative Mask(NO_PIC)
  17015. +Do not generate position-independent code. (deprecated, use -fno-pic instead)
  17016. +
  17017. +mcond-exec-before-reload
  17018. +Target Report Undocumented Mask(COND_EXEC_BEFORE_RELOAD)
  17019. +Enable experimental conditional execution preparation before the reload stage.
  17020. +
  17021. +mrmw-addressable-data
  17022. +Target Report Mask(RMW_ADDRESSABLE_DATA)
  17023. +Signal that all data is in range for the Atomic Read-Modify-Write memory instructions, and that
  17024. +gcc can safely generate these whenever possible.
  17025. +
  17026. +mflashvault
  17027. +Target Var(TARGET_FLASHVAULT)
  17028. +Generate code for flashvault
  17029. +
  17030. +mlist-devices
  17031. +Target RejectNegative Var(avr32_list_supported_parts)
  17032. +Print the list of parts supported while printing --target-help.
  17033. diff -Nur gcc-4.4.6.orig/gcc/config/avr32/crti.asm gcc-4.4.6/gcc/config/avr32/crti.asm
  17034. --- gcc-4.4.6.orig/gcc/config/avr32/crti.asm 1970-01-01 01:00:00.000000000 +0100
  17035. +++ gcc-4.4.6/gcc/config/avr32/crti.asm 2011-10-22 19:23:08.524581303 +0200
  17036. @@ -0,0 +1,64 @@
  17037. +/*
  17038. + Init/fini stuff for AVR32.
  17039. + Copyright 2003-2006 Atmel Corporation.
  17040. +
  17041. + Written by Ronny Pedersen, Atmel Norway, <rpedersen@atmel.com>
  17042. +
  17043. + This file is part of GCC.
  17044. +
  17045. + This program is free software; you can redistribute it and/or modify
  17046. + it under the terms of the GNU General Public License as published by
  17047. + the Free Software Foundation; either version 2 of the License, or
  17048. + (at your option) any later version.
  17049. +
  17050. + This program is distributed in the hope that it will be useful,
  17051. + but WITHOUT ANY WARRANTY; without even the implied warranty of
  17052. + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  17053. + GNU General Public License for more details.
  17054. +
  17055. + You should have received a copy of the GNU General Public License
  17056. + along with this program; if not, write to the Free Software
  17057. + Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */
  17058. +
  17059. +
  17060. +/* The code in sections .init and .fini is supposed to be a single
  17061. + regular function. The function in .init is called directly from
  17062. + start in crt1.asm. The function in .fini is atexit()ed in crt1.asm
  17063. + too.
  17064. +
  17065. + crti.asm contributes the prologue of a function to these sections,
  17066. + and crtn.asm comes up the epilogue. STARTFILE_SPEC should list
  17067. + crti.o before any other object files that might add code to .init
  17068. + or .fini sections, and ENDFILE_SPEC should list crtn.o after any
  17069. + such object files. */
  17070. +
  17071. + .file "crti.asm"
  17072. +
  17073. + .section ".init"
  17074. +/* Just load the GOT */
  17075. + .align 2
  17076. + .global _init
  17077. +_init:
  17078. + stm --sp, r6, lr
  17079. + lddpc r6, 1f
  17080. +0:
  17081. + rsub r6, pc
  17082. + rjmp 2f
  17083. + .align 2
  17084. +1: .long 0b - _GLOBAL_OFFSET_TABLE_
  17085. +2:
  17086. +
  17087. + .section ".fini"
  17088. +/* Just load the GOT */
  17089. + .align 2
  17090. + .global _fini
  17091. +_fini:
  17092. + stm --sp, r6, lr
  17093. + lddpc r6, 1f
  17094. +0:
  17095. + rsub r6, pc
  17096. + rjmp 2f
  17097. + .align 2
  17098. +1: .long 0b - _GLOBAL_OFFSET_TABLE_
  17099. +2:
  17100. +
  17101. diff -Nur gcc-4.4.6.orig/gcc/config/avr32/crtn.asm gcc-4.4.6/gcc/config/avr32/crtn.asm
  17102. --- gcc-4.4.6.orig/gcc/config/avr32/crtn.asm 1970-01-01 01:00:00.000000000 +0100
  17103. +++ gcc-4.4.6/gcc/config/avr32/crtn.asm 2011-10-22 19:23:08.524581303 +0200
  17104. @@ -0,0 +1,44 @@
  17105. +/* Copyright (C) 2001 Free Software Foundation, Inc.
  17106. + Written By Nick Clifton
  17107. +
  17108. + This file is free software; you can redistribute it and/or modify it
  17109. + under the terms of the GNU General Public License as published by the
  17110. + Free Software Foundation; either version 2, or (at your option) any
  17111. + later version.
  17112. +
  17113. + In addition to the permissions in the GNU General Public License, the
  17114. + Free Software Foundation gives you unlimited permission to link the
  17115. + compiled version of this file with other programs, and to distribute
  17116. + those programs without any restriction coming from the use of this
  17117. + file. (The General Public License restrictions do apply in other
  17118. + respects; for example, they cover modification of the file, and
  17119. + distribution when not linked into another program.)
  17120. +
  17121. + This file is distributed in the hope that it will be useful, but
  17122. + WITHOUT ANY WARRANTY; without even the implied warranty of
  17123. + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
  17124. + General Public License for more details.
  17125. +
  17126. + You should have received a copy of the GNU General Public License
  17127. + along with this program; see the file COPYING. If not, write to
  17128. + the Free Software Foundation, 59 Temple Place - Suite 330,
  17129. + Boston, MA 02111-1307, USA.
  17130. +
  17131. + As a special exception, if you link this library with files
  17132. + compiled with GCC to produce an executable, this does not cause
  17133. + the resulting executable to be covered by the GNU General Public License.
  17134. + This exception does not however invalidate any other reasons why
  17135. + the executable file might be covered by the GNU General Public License.
  17136. +*/
  17137. +
  17138. +
  17139. +
  17140. +
  17141. + .file "crtn.asm"
  17142. +
  17143. + .section ".init"
  17144. + ldm sp++, r6, pc
  17145. +
  17146. + .section ".fini"
  17147. + ldm sp++, r6, pc
  17148. +
  17149. diff -Nur gcc-4.4.6.orig/gcc/config/avr32/lib1funcs.S gcc-4.4.6/gcc/config/avr32/lib1funcs.S
  17150. --- gcc-4.4.6.orig/gcc/config/avr32/lib1funcs.S 1970-01-01 01:00:00.000000000 +0100
  17151. +++ gcc-4.4.6/gcc/config/avr32/lib1funcs.S 2011-10-22 19:23:08.524581303 +0200
  17152. @@ -0,0 +1,2902 @@
  17153. +/* Macro for moving immediate value to register. */
  17154. +.macro mov_imm reg, imm
  17155. +.if (((\imm & 0xfffff) == \imm) || ((\imm | 0xfff00000) == \imm))
  17156. + mov \reg, \imm
  17157. +#if __AVR32_UC__ >= 2
  17158. +.elseif ((\imm & 0xffff) == 0)
  17159. + movh \reg, hi(\imm)
  17160. +
  17161. +#endif
  17162. +.else
  17163. + mov \reg, lo(\imm)
  17164. + orh \reg, hi(\imm)
  17165. +.endif
  17166. +.endm
  17167. +
  17168. +
  17169. +
  17170. +/* Adjust the unpacked double number if it is a subnormal number.
  17171. + The exponent and mantissa pair are stored
  17172. + in [mant_hi,mant_lo] and [exp]. A register with the correct sign bit in
  17173. + the MSB is passed in [sign]. Needs two scratch
  17174. + registers [scratch1] and [scratch2]. An adjusted and packed double float
  17175. + is present in [mant_hi,mant_lo] after macro has executed */
  17176. +.macro adjust_subnormal_df exp, mant_lo, mant_hi, sign, scratch1, scratch2
  17177. + /* We have an exponent which is <=0 indicating a subnormal number
  17178. + As it should be stored as if the exponent was 1 (although the
  17179. + exponent field is all zeros to indicate a subnormal number)
  17180. + we have to shift down the mantissa to its correct position. */
  17181. + neg \exp
  17182. + sub \exp,-1 /* amount to shift down */
  17183. + cp.w \exp,54
  17184. + brlo 50f /* if more than 53 shift steps, the
  17185. + entire mantissa will disappear
  17186. + without any rounding to occur */
  17187. + mov \mant_hi, 0
  17188. + mov \mant_lo, 0
  17189. + rjmp 52f
  17190. +50:
  17191. + sub \exp,-10 /* do the shift to position the
  17192. + mantissa at the same time
  17193. + note! this does not include the
  17194. + final 1 step shift to add the sign */
  17195. +
  17196. + /* when shifting, save all shifted out bits in [scratch2]. we may need to
  17197. + look at them to make correct rounding. */
  17198. +
  17199. + rsub \scratch1,\exp,32 /* get inverted shift count */
  17200. + cp.w \exp,32 /* handle shifts >= 32 separately */
  17201. + brhs 51f
  17202. +
  17203. + /* small (<32) shift amount, both words are part of the shift */
  17204. + lsl \scratch2,\mant_lo,\scratch1 /* save bits to shift out from lsw*/
  17205. + lsl \scratch1,\mant_hi,\scratch1 /* get bits from msw destined for lsw*/
  17206. + lsr \mant_lo,\mant_lo,\exp /* shift down lsw */
  17207. + lsr \mant_hi,\mant_hi,\exp /* shift down msw */
  17208. + or \mant_hi,\scratch1 /* add bits from msw with prepared lsw */
  17209. + rjmp 50f
  17210. +
  17211. + /* large (>=32) shift amount, only lsw will have bits left after shift.
  17212. + note that shift operations will use ((shift count) mod 32) so
  17213. + we do not need to subtract 32 from shift count. */
  17214. +51:
  17215. + lsl \scratch2,\mant_hi,\scratch1 /* save bits to shift out from msw */
  17216. + or \scratch2,\mant_lo /* also save all bits from lsw */
  17217. + mov \mant_lo,\mant_hi /* msw -> lsw (i.e. "shift 32 first") */
  17218. + mov \mant_hi,0 /* clear msw */
  17219. + lsr \mant_lo,\mant_lo,\exp /* make rest of shift inside lsw */
  17220. +
  17221. +50:
  17222. + /* result is almost ready to return, except that least significant bit
  17223. + and the part we already shifted out may cause the result to be
  17224. + rounded */
  17225. + bld \mant_lo,0 /* get bit to be shifted out */
  17226. + brcc 51f /* if bit was 0, no rounding */
  17227. +
  17228. + /* msb of part to remove is 1, so rounding depends on rest of bits */
  17229. + tst \scratch2,\scratch2 /* get shifted out tail */
  17230. + brne 50f /* if rest > 0, do round */
  17231. + bld \mant_lo,1 /* we have to look at lsb in result */
  17232. + brcc 51f /* if lsb is 0, don't round */
  17233. +
  17234. +50:
  17235. + /* subnormal result requires rounding
  17236. + rounding may cause subnormal to become smallest normal number
  17237. + luckily, smallest normal number has exactly the representation
  17238. + we got by rippling a one bit up from mantissa into exponent field. */
  17239. + sub \mant_lo,-1
  17240. + subcc \mant_hi,-1
  17241. +
  17242. +51:
  17243. + /* shift and return packed double with correct sign */
  17244. + rol \sign
  17245. + ror \mant_hi
  17246. + ror \mant_lo
  17247. +52:
  17248. +.endm
  17249. +
  17250. +
  17251. +/* Adjust subnormal single float number with exponent [exp]
  17252. + and mantissa [mant] and round. */
  17253. +.macro adjust_subnormal_sf sf, exp, mant, sign, scratch
  17254. + /* subnormal number */
  17255. + rsub \exp,\exp, 1 /* shift amount */
  17256. + cp.w \exp, 25
  17257. + movhs \mant, 0
  17258. + brhs 90f /* Return zero */
  17259. + rsub \scratch, \exp, 32
  17260. + lsl \scratch, \mant,\scratch/* Check if there are any bits set
  17261. + in the bits discarded in the mantissa */
  17262. + srne \scratch /* If so set the lsb of the shifted mantissa */
  17263. + lsr \mant,\mant,\exp /* Shift the mantissa */
  17264. + or \mant, \scratch /* Round lsb if any bits were shifted out */
  17265. + /* Rounding : For explaination, see round_sf. */
  17266. + mov \scratch, 0x7f /* Set rounding constant */
  17267. + bld \mant, 8
  17268. + subeq \scratch, -1 /* For odd numbers use rounding constant 0x80 */
  17269. + add \mant, \scratch /* Add rounding constant to mantissa */
  17270. + /* We can't overflow because mantissa is at least shifted one position
  17271. + to the right so the implicit bit is zero. We can however get the implicit
  17272. + bit set after rounding which means that we have the lowest normal number
  17273. + but this is ok since this bit has the same position as the LSB of the
  17274. + exponent */
  17275. + lsr \sf, \mant, 7
  17276. + /* Rotate in sign */
  17277. + lsl \sign, 1
  17278. + ror \sf
  17279. +90:
  17280. +.endm
  17281. +
  17282. +
  17283. +/* Round the unpacked df number with exponent [exp] and
  17284. + mantissa [mant_hi, mant_lo]. Uses scratch register
  17285. + [scratch] */
  17286. +.macro round_df exp, mant_lo, mant_hi, scratch
  17287. + mov \scratch, 0x3ff /* Rounding constant */
  17288. + bld \mant_lo,11 /* Check if lsb in the final result is
  17289. + set */
  17290. + subeq \scratch, -1 /* Adjust rounding constant to 0x400
  17291. + if rounding 0.5 upwards */
  17292. + add \mant_lo, \scratch /* Round */
  17293. + acr \mant_hi /* If overflowing we know that
  17294. + we have all zeros in the bits not
  17295. + scaled out so we can leave them
  17296. + but we must increase the exponent with
  17297. + two since we had an implicit bit
  17298. + which is lost + the extra overflow bit */
  17299. + subcs \exp, -2 /* Update exponent */
  17300. +.endm
  17301. +
  17302. +/* Round single float number stored in [mant] and [exp] */
  17303. +.macro round_sf exp, mant, scratch
  17304. + /* Round:
  17305. + For 0.5 we round to nearest even integer
  17306. + for all other cases we round to nearest integer.
  17307. + This means that if the digit left of the "point" (.)
  17308. + is 1 we can add 0x80 to the mantissa since the
  17309. + corner case 0x180 will round up to 0x200. If the
  17310. + digit left of the "point" is 0 we will have to
  17311. + add 0x7f since this will give 0xff and hence a
  17312. + truncation/rounding downwards for the corner
  17313. + case when the 9 lowest bits are 0x080 */
  17314. + mov \scratch, 0x7f /* Set rounding constant */
  17315. + /* Check if the mantissa is even or odd */
  17316. + bld \mant, 8
  17317. + subeq \scratch, -1 /* Rounding constant should be 0x80 */
  17318. + add \mant, \scratch
  17319. + subcs \exp, -2 /* Adjust exponent if we overflowed */
  17320. +.endm
  17321. +
  17322. +
  17323. +
  17324. +/* Pack a single float number stored in [mant] and [exp]
  17325. + into a single float number in [sf] */
  17326. +.macro pack_sf sf, exp, mant
  17327. + bld \mant,31 /* implicit bit to z */
  17328. + subne \exp,1 /* if subnormal (implicit bit 0)
  17329. + adjust exponent to storage format */
  17330. +
  17331. + lsr \sf, \mant, 7
  17332. + bfins \sf, \exp, 24, 8
  17333. +.endm
  17334. +
  17335. +/* Pack exponent [exp] and mantissa [mant_hi, mant_lo]
  17336. + into [df_hi, df_lo]. [df_hi] is shifted
  17337. + one bit up so the sign bit can be shifted into it */
  17338. +
  17339. +.macro pack_df exp, mant_lo, mant_hi, df_lo, df_hi
  17340. + bld \mant_hi,31 /* implicit bit to z */
  17341. + subne \exp,1 /* if subnormal (implicit bit 0)
  17342. + adjust exponent to storage format */
  17343. +
  17344. + lsr \mant_lo,11 /* shift back lsw */
  17345. + or \df_lo,\mant_lo,\mant_hi<<21 /* combine with low bits from msw */
  17346. + lsl \mant_hi,1 /* get rid of implicit bit */
  17347. + lsr \mant_hi,11 /* shift back msw except for one step*/
  17348. + or \df_hi,\mant_hi,\exp<<21 /* combine msw with exponent */
  17349. +.endm
  17350. +
  17351. +/* Normalize single float number stored in [mant] and [exp]
  17352. + using scratch register [scratch] */
  17353. +.macro normalize_sf exp, mant, scratch
  17354. + /* Adjust exponent and mantissa */
  17355. + clz \scratch, \mant
  17356. + sub \exp, \scratch
  17357. + lsl \mant, \mant, \scratch
  17358. +.endm
  17359. +
  17360. +/* Normalize the exponent and mantissa pair stored
  17361. + in [mant_hi,mant_lo] and [exp]. Needs two scratch
  17362. + registers [scratch1] and [scratch2]. */
  17363. +.macro normalize_df exp, mant_lo, mant_hi, scratch1, scratch2
  17364. + clz \scratch1,\mant_hi /* Check if we have zeros in high bits */
  17365. + breq 80f /* No need for scaling if no zeros in high bits */
  17366. + brcs 81f /* Check for all zeros */
  17367. +
  17368. + /* shift amount is smaller than 32, and involves both msw and lsw*/
  17369. + rsub \scratch2,\scratch1,32 /* shift mantissa */
  17370. + lsl \mant_hi,\mant_hi,\scratch1
  17371. + lsr \scratch2,\mant_lo,\scratch2
  17372. + or \mant_hi,\scratch2
  17373. + lsl \mant_lo,\mant_lo,\scratch1
  17374. + sub \exp,\scratch1 /* adjust exponent */
  17375. + rjmp 80f /* Finished */
  17376. +81:
  17377. + /* shift amount is greater than 32 */
  17378. + clz \scratch1,\mant_lo /* shift mantissa */
  17379. + movcs \scratch1, 0
  17380. + subcc \scratch1,-32
  17381. + lsl \mant_hi,\mant_lo,\scratch1
  17382. + mov \mant_lo,0
  17383. + sub \exp,\scratch1 /* adjust exponent */
  17384. +80:
  17385. +.endm
  17386. +
  17387. +
  17388. +/* Fast but approximate multiply of two 64-bit numbers to give a 64 bit result.
  17389. + The multiplication of [al]x[bl] is discarded.
  17390. + Operands in [ah], [al], [bh], [bl].
  17391. + Scratch registers in [sh], [sl].
  17392. + Returns results in registers [rh], [rl].*/
  17393. +.macro mul_approx_df ah, al, bh, bl, rh, rl, sh, sl
  17394. + mulu.d \sl, \ah, \bl
  17395. + macu.d \sl, \al, \bh
  17396. + mulu.d \rl, \ah, \bh
  17397. + add \rl, \sh
  17398. + acr \rh
  17399. +.endm
  17400. +
  17401. +
  17402. +
  17403. +#if defined(L_avr32_f64_mul) || defined(L_avr32_f64_mul_fast)
  17404. + .align 2
  17405. +#if defined(L_avr32_f64_mul)
  17406. + .global __avr32_f64_mul
  17407. + .type __avr32_f64_mul,@function
  17408. +__avr32_f64_mul:
  17409. +#else
  17410. + .global __avr32_f64_mul_fast
  17411. + .type __avr32_f64_mul_fast,@function
  17412. +__avr32_f64_mul_fast:
  17413. +#endif
  17414. + or r12, r10, r11 << 1
  17415. + breq __avr32_f64_mul_op1_zero
  17416. +
  17417. +#if defined(L_avr32_f64_mul)
  17418. + pushm r4-r7, lr
  17419. +#else
  17420. + stm --sp, r5,r6,r7,lr
  17421. +#endif
  17422. +
  17423. +#define AVR32_F64_MUL_OP1_INT_BITS 1
  17424. +#define AVR32_F64_MUL_OP2_INT_BITS 10
  17425. +#define AVR32_F64_MUL_RES_INT_BITS 11
  17426. +
  17427. + /* op1 in {r11,r10}*/
  17428. + /* op2 in {r9,r8}*/
  17429. + eor lr, r11, r9 /* MSB(lr) = Sign(op1) ^ Sign(op2) */
  17430. +
  17431. + /* Unpack op1 to 1.63 format*/
  17432. + /* exp: r7 */
  17433. + /* sf: r11, r10 */
  17434. + bfextu r7, r11, 20, 11 /* Extract exponent */
  17435. +
  17436. + mov r5, 1
  17437. +
  17438. + /* Check if normalization is needed */
  17439. + breq __avr32_f64_mul_op1_subnormal /*If number is subnormal, normalize it */
  17440. +
  17441. + lsl r11, (12-AVR32_F64_MUL_OP1_INT_BITS-1) /* Extract mantissa, leave room for implicit bit */
  17442. + or r11, r11, r10>>(32-(12-AVR32_F64_MUL_OP1_INT_BITS-1))
  17443. + lsl r10, (12-AVR32_F64_MUL_OP1_INT_BITS-1)
  17444. + bfins r11, r5, 32 - (1 + AVR32_F64_MUL_OP1_INT_BITS), 1 + AVR32_F64_MUL_OP1_INT_BITS /* Insert implicit bit */
  17445. +
  17446. +
  17447. +22:
  17448. + /* Unpack op2 to 10.54 format */
  17449. + /* exp: r6 */
  17450. + /* sf: r9, r8 */
  17451. + bfextu r6, r9, 20, 11 /* Extract exponent */
  17452. +
  17453. + /* Check if normalization is needed */
  17454. + breq __avr32_f64_mul_op2_subnormal /*If number is subnormal, normalize it */
  17455. +
  17456. + lsl r8, 1 /* Extract mantissa, leave room for implicit bit */
  17457. + rol r9
  17458. + bfins r9, r5, 32 - (1 + AVR32_F64_MUL_OP2_INT_BITS), 1 + AVR32_F64_MUL_OP2_INT_BITS /* Insert implicit bit */
  17459. +
  17460. +23:
  17461. +
  17462. + /* Check if any operands are NaN or INF */
  17463. + cp r7, 0x7ff
  17464. + breq __avr32_f64_mul_op_nan_or_inf /* Check op1 for NaN or Inf */
  17465. + cp r6, 0x7ff
  17466. + breq __avr32_f64_mul_op_nan_or_inf /* Check op2 for NaN or Inf */
  17467. +
  17468. +
  17469. + /* Calculate new exponent in r12*/
  17470. + add r12, r7, r6
  17471. + sub r12, (1023-1)
  17472. +
  17473. +#if defined(L_avr32_f64_mul)
  17474. + /* Do the multiplication.
  17475. + Place result in [r11, r10, r7, r6]. The result is in 11.117 format. */
  17476. + mulu.d r4, r11, r8
  17477. + macu.d r4, r10, r9
  17478. + mulu.d r6, r10, r8
  17479. + mulu.d r10, r11, r9
  17480. + add r7, r4
  17481. + adc r10, r10, r5
  17482. + acr r11
  17483. +#else
  17484. + /* Do the multiplication using approximate calculation. discard the al x bl
  17485. + calculation.
  17486. + Place result in [r11, r10, r7]. The result is in 11.85 format. */
  17487. +
  17488. + /* Do the multiplication using approximate calculation.
  17489. + Place result in r11, r10. Use r7, r6 as scratch registers */
  17490. + mulu.d r6, r11, r8
  17491. + macu.d r6, r10, r9
  17492. + mulu.d r10, r11, r9
  17493. + add r10, r7
  17494. + acr r11
  17495. +#endif
  17496. + /* Adjust exponent and mantissa */
  17497. + /* [r12]:exp, [r11, r10]:mant [r7, r6]:sticky bits */
  17498. + /* Mantissa may be of the format 00000000000.0xxx or 00000000000.1xxx. */
  17499. + /* In the first case, shift one pos to left.*/
  17500. + bld r11, 32-AVR32_F64_MUL_RES_INT_BITS-1
  17501. + breq 0f
  17502. + lsl r7, 1
  17503. + rol r10
  17504. + rol r11
  17505. + sub r12, 1
  17506. +0:
  17507. + cp r12, 0
  17508. + brle __avr32_f64_mul_res_subnormal /*Result was subnormal.*/
  17509. +
  17510. + /* Check for Inf. */
  17511. + cp.w r12, 0x7ff
  17512. + brge __avr32_f64_mul_res_inf
  17513. +
  17514. + /* Insert exponent. */
  17515. + bfins r11, r12, 20, 11
  17516. +
  17517. + /* Result was not subnormal. Perform rounding. */
  17518. + /* For the fast version we discard the sticky bits and always round
  17519. + the halfwaycase up. */
  17520. +24:
  17521. +#if defined(L_avr32_f64_mul)
  17522. + or r6, r6, r10 << 31 /* Or in parity bit into stickybits */
  17523. + or r7, r7, r6 >> 1 /* Or together sticky and still make the msb
  17524. + of r7 represent the halfway bit. */
  17525. + eorh r7, 0x8000 /* Toggle halfway bit. */
  17526. + /* We should now round up by adding one for the following cases:
  17527. +
  17528. + halfway sticky|parity round-up
  17529. + 0 x no
  17530. + 1 0 no
  17531. + 1 1 yes
  17532. +
  17533. + Since we have inverted the halfway bit we can use the satu instruction
  17534. + by saturating to 1 bit to implement this.
  17535. + */
  17536. + satu r7 >> 0, 1
  17537. +#else
  17538. + lsr r7, 31
  17539. +#endif
  17540. + add r10, r7
  17541. + acr r11
  17542. +
  17543. + /* Insert sign bit*/
  17544. + bld lr, 31
  17545. + bst r11, 31
  17546. +
  17547. + /* Return result in [r11,r10] */
  17548. +#if defined(L_avr32_f64_mul)
  17549. + popm r4-r7, pc
  17550. +#else
  17551. + ldm sp++, r5, r6, r7,pc
  17552. +#endif
  17553. +
  17554. +
  17555. +__avr32_f64_mul_op1_subnormal:
  17556. + andh r11, 0x000f /* Remove sign bit and exponent */
  17557. + clz r12, r10 /* Count leading zeros in lsw */
  17558. + clz r6, r11 /* Count leading zeros in msw */
  17559. + subcs r12, -32 + AVR32_F64_MUL_OP1_INT_BITS
  17560. + movcs r6, r12
  17561. + subcc r6, AVR32_F64_MUL_OP1_INT_BITS
  17562. + cp.w r6, 32
  17563. + brge 0f
  17564. +
  17565. + /* shifting involves both msw and lsw*/
  17566. + rsub r12, r6, 32 /* shift mantissa */
  17567. + lsl r11, r11, r6
  17568. + lsr r12, r10, r12
  17569. + or r11, r12
  17570. + lsl r10, r10, r6
  17571. + sub r6, 12-AVR32_F64_MUL_OP1_INT_BITS
  17572. + sub r7, r6 /* adjust exponent */
  17573. + rjmp 22b /* Finished */
  17574. +0:
  17575. + /* msw is zero so only need to consider lsw */
  17576. + lsl r11, r10, r6
  17577. + breq __avr32_f64_mul_res_zero
  17578. + mov r10, 0
  17579. + sub r6, 12-AVR32_F64_MUL_OP1_INT_BITS
  17580. + sub r7, r6 /* adjust exponent */
  17581. + rjmp 22b
  17582. +
  17583. +
  17584. +__avr32_f64_mul_op2_subnormal:
  17585. + andh r9, 0x000f /* Remove sign bit and exponent */
  17586. + clz r12, r8 /* Count leading zeros in lsw */
  17587. + clz r5, r9 /* Count leading zeros in msw */
  17588. + subcs r12, -32 + AVR32_F64_MUL_OP2_INT_BITS
  17589. + movcs r5, r12
  17590. + subcc r5, AVR32_F64_MUL_OP2_INT_BITS
  17591. + cp.w r5, 32
  17592. + brge 0f
  17593. +
  17594. + /* shifting involves both msw and lsw*/
  17595. + rsub r12, r5, 32 /* shift mantissa */
  17596. + lsl r9, r9, r5
  17597. + lsr r12, r8, r12
  17598. + or r9, r12
  17599. + lsl r8, r8, r5
  17600. + sub r5, 12 - AVR32_F64_MUL_OP2_INT_BITS
  17601. + sub r6, r5 /* adjust exponent */
  17602. + rjmp 23b /* Finished */
  17603. +0:
  17604. + /* msw is zero so only need to consider lsw */
  17605. + lsl r9, r8, r5
  17606. + breq __avr32_f64_mul_res_zero
  17607. + mov r8, 0
  17608. + sub r5, 12 - AVR32_F64_MUL_OP2_INT_BITS
  17609. + sub r6, r5 /* adjust exponent */
  17610. + rjmp 23b
  17611. +
  17612. +
  17613. +__avr32_f64_mul_op_nan_or_inf:
  17614. + /* Same code for OP1 and OP2*/
  17615. + /* Since we are here, at least one of the OPs were NaN or INF*/
  17616. + andh r9, 0x000f /* Remove sign bit and exponent */
  17617. + andh r11, 0x000f /* Remove sign bit and exponent */
  17618. + /* Merge the regs in each operand to check for zero*/
  17619. + or r11, r10 /* op1 */
  17620. + or r9, r8 /* op2 */
  17621. + /* Check if op1 is NaN or INF */
  17622. + cp r7, 0x7ff
  17623. + brne __avr32_f64_mul_op1_not_naninf
  17624. + /* op1 was NaN or INF.*/
  17625. + cp r11, 0
  17626. + brne __avr32_f64_mul_res_nan /* op1 was NaN. Result will be NaN*/
  17627. + /*op1 was INF. check if op2 is NaN or INF*/
  17628. + cp r6, 0x7ff
  17629. + brne __avr32_f64_mul_res_inf /*op1 was INF, op2 was neither NaN nor INF*/
  17630. + /* op1 is INF, op2 is either NaN or INF*/
  17631. + cp r9, 0
  17632. + breq __avr32_f64_mul_res_inf /*op2 was also INF*/
  17633. + rjmp __avr32_f64_mul_res_nan /*op2 was NaN*/
  17634. +
  17635. +__avr32_f64_mul_op1_not_naninf:
  17636. + /* op1 was not NaN nor INF. Then op2 must be NaN or INF*/
  17637. + cp r9, 0
  17638. + breq __avr32_f64_mul_res_inf /*op2 was INF, return INF*/
  17639. + rjmp __avr32_f64_mul_res_nan /*else return NaN*/
  17640. +
  17641. +__avr32_f64_mul_res_subnormal:/* Multiply result was subnormal. */
  17642. +#if defined(L_avr32_f64_mul)
  17643. + /* Check how much we must scale down the mantissa. */
  17644. + neg r12
  17645. + sub r12, -1 /* We do no longer have an implicit bit. */
  17646. + satu r12 >> 0, 6 /* Saturate shift amount to max 63. */
  17647. + cp.w r12, 32
  17648. + brge 0f
  17649. + /* Shift amount <32 */
  17650. + rsub r8, r12, 32
  17651. + or r6, r7
  17652. + lsr r7, r7, r12
  17653. + lsl r9, r10, r8
  17654. + or r7, r9
  17655. + lsr r10, r10, r12
  17656. + lsl r9, r11, r8
  17657. + or r10, r9
  17658. + lsr r11, r11, r12
  17659. + rjmp 24b
  17660. +0:
  17661. + /* Shift amount >=32 */
  17662. + rsub r8, r12, 32
  17663. + moveq r9, 0
  17664. + breq 0f
  17665. + lsl r9, r11, r8
  17666. +0:
  17667. + or r6, r7
  17668. + or r6, r6, r10 << 1
  17669. + lsr r10, r10, r12
  17670. + or r7, r9, r10
  17671. + lsr r10, r11, r12
  17672. + mov r11, 0
  17673. + rjmp 24b
  17674. +#else
  17675. + /* Flush to zero for the fast version. */
  17676. + mov r11, lr /*Get correct sign*/
  17677. + andh r11, 0x8000, COH
  17678. + mov r10, 0
  17679. + ldm sp++, r5, r6, r7,pc
  17680. +#endif
  17681. +
  17682. +__avr32_f64_mul_res_zero:/* Multiply result is zero. */
  17683. + mov r11, lr /*Get correct sign*/
  17684. + andh r11, 0x8000, COH
  17685. + mov r10, 0
  17686. +#if defined(L_avr32_f64_mul)
  17687. + popm r4-r7, pc
  17688. +#else
  17689. + ldm sp++, r5, r6, r7,pc
  17690. +#endif
  17691. +
  17692. +__avr32_f64_mul_res_nan: /* Return NaN. */
  17693. + mov r11, -1
  17694. + mov r10, -1
  17695. +#if defined(L_avr32_f64_mul)
  17696. + popm r4-r7, pc
  17697. +#else
  17698. + ldm sp++, r5, r6, r7,pc
  17699. +#endif
  17700. +
  17701. +__avr32_f64_mul_res_inf: /* Return INF. */
  17702. + mov r11, 0xfff00000
  17703. + bld lr, 31
  17704. + bst r11, 31
  17705. + mov r10, 0
  17706. +#if defined(L_avr32_f64_mul)
  17707. + popm r4-r7, pc
  17708. +#else
  17709. + ldm sp++, r5, r6, r7,pc
  17710. +#endif
  17711. +
  17712. +__avr32_f64_mul_op1_zero:
  17713. + /* Get sign */
  17714. + eor r11, r11, r9
  17715. + andh r11, 0x8000, COH
  17716. + /* Check if op2 is Inf or NaN. */
  17717. + bfextu r12, r9, 20, 11
  17718. + cp.w r12, 0x7ff
  17719. + retne r12 /* Return 0.0 */
  17720. + /* Return NaN */
  17721. + mov r10, -1
  17722. + mov r11, -1
  17723. + ret r12
  17724. +
  17725. +
  17726. +
  17727. +#endif
  17728. +
  17729. +
  17730. +#if defined(L_avr32_f64_addsub) || defined(L_avr32_f64_addsub_fast)
  17731. + .align 2
  17732. +
  17733. +__avr32_f64_sub_from_add: /* Entered from the add routine when operand signs differ. */
  17734. + /* Switch sign on op2 */
  17735. + eorh r9, 0x8000
  17736. +
  17737. +#if defined(L_avr32_f64_addsub_fast)
  17738. + .global __avr32_f64_sub_fast
  17739. + .type __avr32_f64_sub_fast,@function
  17740. +__avr32_f64_sub_fast:
  17741. +#else
  17742. + .global __avr32_f64_sub
  17743. + .type __avr32_f64_sub,@function
  17744. +__avr32_f64_sub:
  17745. +#endif
  17746. +
  17747. + /* op1 in {r11,r10}*/
  17748. + /* op2 in {r9,r8}*/
  17749. +
  17750. +#if defined(L_avr32_f64_addsub_fast)
  17751. + /* If op2 is zero just return op1 */
  17752. + or r12, r8, r9 << 1 /* fold op2 (sign bit discarded) into one zero test */
  17753. + reteq r12
  17754. +#endif
  17755. +
  17756. + /* Check signs */
  17757. + eor r12, r11, r9
  17758. + /* Different signs, use addition. */
  17759. + brmi __avr32_f64_add_from_sub
  17760. +
  17761. + stm --sp, r5, r6, r7, lr /* save callee-saved scratch registers */
  17762. +
  17763. + /* Get sign of op1 into r12 */
  17764. + mov r12, r11
  17765. + andh r12, 0x8000, COH
  17766. +
  17767. + /* Remove sign from operands */
  17768. + cbr r11, 31
  17769. + cbr r9, 31
  17770. +
  17771. + /* Put the largest number in [r11, r10]
  17772. + and the smallest number in [r9, r8] */
  17773. + cp r10, r8
  17774. + cpc r11, r9 /* 64-bit magnitude compare */
  17775. + brhs 1f /* Skip swap if operands already correctly ordered*/
  17776. + /* Operands were not correctly ordered, swap them*/
  17777. + mov r7, r11
  17778. + mov r11, r9
  17779. + mov r9, r7
  17780. + mov r7, r10
  17781. + mov r10, r8
  17782. + mov r8, r7
  17783. + eorh r12, 0x8000 /* Invert sign in r12*/
  17784. +1:
  17785. + /* Unpack largest operand - opH */
  17786. + /* exp: r7 */
  17787. + /* sf: r11, r10 */
  17788. + lsr r7, r11, 20 /* Extract exponent */
  17789. + lsl r11, 11 /* Extract mantissa, leave room for implicit bit */
  17790. + or r11, r11, r10>>21
  17791. + lsl r10, 11
  17792. + sbr r11, 31 /* Insert implicit bit */
  17793. +
  17794. +
  17795. + /* Unpack smallest operand - opL */
  17796. + /* exp: r6 */
  17797. + /* sf: r9, r8 */
  17798. + lsr r6, r9, 20 /* Extract exponent */
  17799. + breq __avr32_f64_sub_opL_subnormal /* If either zero or subnormal */
  17800. + lsl r9, 11 /* Extract mantissa, leave room for implicit bit */
  17801. + or r9, r9, r8>>21
  17802. + lsl r8, 11
  17803. + sbr r9, 31 /* Insert implicit bit */
  17804. +
  17805. +
  17806. +__avr32_f64_sub_opL_subnormal_done:
  17807. + /* Check if opH is NaN or Inf. */
  17808. + cp.w r7, 0x7ff
  17809. + breq __avr32_f64_sub_opH_nan_or_inf
  17810. +
  17811. + /* Get shift amount to scale mantissa of op2. */
  17812. + rsub r6, r7 /* r6 = expH - expL */
  17813. + breq __avr32_f64_sub_shift_done /* No need to shift, exponents are equal*/
  17814. +
  17815. + /* Scale mantissa [r9, r8] with amount [r6].
  17816. + Uses scratch registers [r5] and [lr].
  17817. + In IEEE mode:Must not forget the sticky bits we intend to shift out. */
  17818. +
  17819. + rsub r5,r6,32 /* get (32 - shift count)
  17820. + (if shift count > 32 we get a
  17821. + negative value, but that will
  17822. + work as well in the code below.) */
  17823. +
  17824. + cp.w r6,32 /* handle shifts >= 32 separately */
  17825. + brhs __avr32_f64_sub_longshift
  17826. +
  17827. + /* small (<32) shift amount, both words are part of the shift
  17828. + first remember whether part that is lost contains any 1 bits ... */
  17829. + lsl lr,r8,r5 /* shift away bits that are part of
  17830. + final mantissa. only part that goes
  17831. + to lr are bits that will be lost */
  17832. +
  17833. + /* ... and now to the actual shift */
  17834. + lsl r5,r9,r5 /* get bits from msw destined for lsw*/
  17835. + lsr r8,r8,r6 /* shift down lsw of mantissa */
  17836. + lsr r9,r9,r6 /* shift down msw of mantissa */
  17837. + or r8,r5 /* combine these bits with prepared lsw*/
  17838. +#if defined(L_avr32_f64_addsub)
  17839. + cp.w lr,0 /* if any '1' bit in part we lost ...*/
  17840. + srne lr
  17841. + or r8, lr /* ... we need to set sticky bit*/
  17842. +#endif
  17843. +
  17844. +__avr32_f64_sub_shift_done: /* Mantissas aligned; subtract, normalize, pack and round. */
  17845. + /* Now subtract the mantissas. */
  17846. + sub r10, r8
  17847. + sbc r11, r11, r9 /* 64-bit subtract with borrow */
  17848. +
  17849. + /* Normalize the exponent and mantissa pair stored in
  17850. + [r11,r10] and exponent in [r7]. Needs two scratch registers [r6] and [lr]. */
  17851. + clz r6,r11 /* Check if we have zeros in high bits */
  17852. + breq __avr32_f64_sub_longnormalize_done /* No need for scaling if no zeros in high bits */
  17853. + brcs __avr32_f64_sub_longnormalize /* msw was all zero: normalize from lsw */
  17854. +
  17855. +
  17856. + /* shift amount is smaller than 32, and involves both msw and lsw*/
  17857. + rsub lr,r6,32 /* shift mantissa */
  17858. + lsl r11,r11,r6
  17859. + lsr lr,r10,lr
  17860. + or r11,lr
  17861. + lsl r10,r10,r6
  17862. +
  17863. + sub r7,r6 /* adjust exponent */
  17864. + brle __avr32_f64_sub_subnormal_result
  17865. +__avr32_f64_sub_longnormalize_done:
  17866. +
  17867. +#if defined(L_avr32_f64_addsub)
  17868. + /* Insert the bits we will remove from the mantissa r9[31:21] */
  17869. + lsl r9, r10, (32 - 11)
  17870. +#else
  17871. + /* Keep the last bit shifted out. */
  17872. + bfextu r9, r10, 10, 1
  17873. +#endif
  17874. +
  17875. + /* Pack final result*/
  17876. + /* Input: [r7]:exp, [r11, r10]:mant, [r12]:sign in MSB */
  17877. + /* Result in [r11,r10] */
  17878. + /* Insert mantissa */
  17879. + lsr r10, 11
  17880. + or r10, r10, r11<<21
  17881. + lsr r11, 11
  17882. + /* Insert exponent and sign bit*/
  17883. + bfins r11, r7, 20, 11
  17884. + or r11, r12
  17885. +
  17886. + /* Round */
  17887. +__avr32_f64_sub_round:
  17888. +#if defined(L_avr32_f64_addsub)
  17889. + mov_imm r7, 0x80000000 /* round-to-nearest threshold */
  17890. + bld r10, 0 /* parity bit of kept mantissa */
  17891. + subne r7, -1 /* ties-to-even: odd parity lowers threshold by one */
  17892. +
  17893. + cp.w r9, r7
  17894. + srhs r9 /* r9 = 1 if discarded bits >= threshold */
  17895. +#endif
  17896. + add r10, r9
  17897. + acr r11 /* propagate rounding carry into high word */
  17898. +
  17899. + /* Return result in [r11,r10] */
  17900. + ldm sp++, r5, r6, r7,pc
  17901. +
  17902. +
  17903. +
  17904. +__avr32_f64_sub_opL_subnormal: /* opL has a zero exponent field: zero or subnormal. */
  17905. + /* Extract the mantissa */
  17906. + lsl r9, 11 /* Extract mantissa, leave room for implicit bit */
  17907. + or r9, r9, r8>>21
  17908. + lsl r8, 11
  17909. +
  17910. + /* Set exponent to 1 if we do not have a zero. */
  17911. + or lr, r9, r8
  17912. + movne r6,1
  17913. +
  17914. + /* Check if opH is also subnormal. If so, clear implicit bit in r11*/
  17915. + rsub lr, r7, 0 /* sets C iff r7 != 0 (opH normal) */
  17916. + moveq r7,1
  17917. + bst r11, 31 /* implicit bit of opH = C: set only when opH is normal */
  17918. +
  17919. + /* Check if op1 is zero, if so set exponent to 0. */
  17920. + or lr, r11, r10
  17921. + moveq r7,0
  17922. +
  17923. + rjmp __avr32_f64_sub_opL_subnormal_done
  17924. +
  17925. +__avr32_f64_sub_opH_nan_or_inf: /* opH exponent is all ones: NaN or Inf. */
  17926. + /* Check if opH is NaN, if so return NaN */
  17927. + cbr r11, 31 /* drop implicit bit; remaining mantissa nonzero => NaN */
  17928. + or lr, r11, r10
  17929. + brne __avr32_f64_sub_return_nan
  17930. +
  17931. + /* opH is Inf. */
  17932. + /* Check if opL is Inf. or NaN */
  17933. + cp.w r6, 0x7ff
  17934. + breq __avr32_f64_sub_return_nan /* Inf - Inf (same effective sign here) is NaN */
  17935. + /* Return infinity with correct sign. */
  17936. + or r11, r12, r7 << 20
  17937. + ldm sp++, r5, r6, r7, pc/* opL not Inf or NaN, return opH */
  17938. +__avr32_f64_sub_return_nan:
  17939. + mov r10, -1 /* Generate NaN in r11, r10 */
  17940. + mov r11, -1
  17941. + ldm sp++, r5, r6, r7, pc/* opL Inf or NaN, return NaN */
  17942. +
  17943. +
  17944. +__avr32_f64_sub_subnormal_result: /* Exponent underflowed (<= 0): denormalize (IEEE) or flush to zero (fast). */
  17945. +#if defined(L_avr32_f64_addsub)
  17946. + /* Check how much we must scale down the mantissa. */
  17947. + neg r7
  17948. + sub r7, -1 /* We do no longer have an implicit bit. */
  17949. + satu r7 >> 0, 6 /* Saturate shift amount to max 63. */
  17950. + cp.w r7, 32
  17951. + brge 0f
  17952. + /* Shift amount <32 */
  17953. + rsub r8, r7, 32
  17954. + lsl r9, r10, r8 /* bits of lsw that will be shifted out */
  17955. + srne r6 /* r6 = 1 if any lost bit was set */
  17956. + lsr r10, r10, r7
  17957. + or r10, r6 /* Sticky bit from the
  17958. + part that was shifted out. */
  17959. + lsl r9, r11, r8 /* bits moving from msw into lsw */
  17960. + or r10, r10, r9
  17961. + lsr r11, r11, r7 /* BUGFIX: was 'lsr r11, r10, r7' - the new msw must come from r11; r10 already holds the combined lsw at this point (matches the >=32 path below, which sources r11). */
  17962. + /* Set exponent */
  17963. + mov r7, 0
  17964. + rjmp __avr32_f64_sub_longnormalize_done
  17965. +0:
  17966. + /* Shift amount >=32 */
  17967. + rsub r8, r7, 64
  17968. + lsl r9, r11, r8 /* bits of msw that will be shifted out */
  17969. + or r9, r10 /* all of the old lsw is lost as well */
  17970. + srne r6 /* r6 = 1 if any lost bit was set */
  17971. + lsr r10, r11, r7
  17972. + or r10, r6 /* Sticky bit from the
  17973. + part that was shifted out. */
  17974. + mov r11, 0
  17975. + /* Set exponent */
  17976. + mov r7, 0
  17977. + rjmp __avr32_f64_sub_longnormalize_done
  17978. +#else
  17979. + /* Just flush subnormals to zero. */
  17980. + mov r10, 0
  17981. + mov r11, 0
  17982. +#endif
  17983. + ldm sp++, r5, r6, r7, pc
  17984. +
  17985. +__avr32_f64_sub_longshift: /* Align opL mantissa when the exponent gap is >= 32 bits. */
  17986. + /* large (>=32) shift amount, only lsw will have bits left after shift.
  17987. + note that shift operations will use ((shift count=r6) mod 32) so
  17988. + we do not need to subtract 32 from shift count. */
  17989. + /* Saturate the shift amount to 63. If the amount
  17990. + is any larger op2 is insignificant. */
  17991. + satu r6 >> 0, 6
  17992. +
  17993. +#if defined(L_avr32_f64_addsub)
  17994. + /* first remember whether part that is lost contains any 1 bits ... */
  17995. + moveq lr, r8 /* If shift amount is 32, no bits from msw are lost. */
  17996. + breq 0f
  17997. + lsl lr,r9,r5 /* save all lost bits from msw */
  17998. + or lr,r8 /* also save lost bits (all) from lsw
  17999. + now lr != 0 if we lose any bits */
  18000. +#endif
  18001. +0:
  18002. + /* ... and now to the actual shift */
  18003. + lsr r8,r9,r6 /* Move msw to lsw and shift. */
  18004. + mov r9,0 /* clear msw */
  18005. +#if defined(L_avr32_f64_addsub)
  18006. + cp.w lr,0 /* if any '1' bit in part we lost ...*/
  18007. + srne lr
  18008. + or r8, lr /* ... we need to set sticky bit*/
  18009. +#endif
  18010. + rjmp __avr32_f64_sub_shift_done
  18011. +
  18012. +__avr32_f64_sub_longnormalize: /* Normalize when the whole msw cancelled to zero. */
  18013. + /* shift amount is greater than 32 */
  18014. + clz r6,r10 /* shift mantissa */
  18015. + /* If the resulting mantissa is zero the result is
  18016. + zero so force exponent to zero. */
  18017. + movcs r7, 0 /* C set by clz means r10 was zero too */
  18018. + movcs r6, 0
  18019. + movcs r12, 0 /* Also clear sign bit. A zero result from subtraction
  18020. + always is +0.0 */
  18021. + subcc r6,-32 /* total shift = 32 + leading zeros of lsw */
  18022. + lsl r11,r10,r6
  18023. + mov r10,0
  18024. + sub r7,r6 /* adjust exponent */
  18025. + brle __avr32_f64_sub_subnormal_result
  18026. + rjmp __avr32_f64_sub_longnormalize_done
  18027. +
  18028. +
  18029. +
  18030. + .align 2
  18031. +__avr32_f64_add_from_sub: /* Entered from the sub routine when operand signs differ. */
  18032. + /* Switch sign on op2 */
  18033. + eorh r9, 0x8000
  18034. +
  18035. +#if defined(L_avr32_f64_addsub_fast)
  18036. + .global __avr32_f64_add_fast
  18037. + .type __avr32_f64_add_fast,@function
  18038. +__avr32_f64_add_fast:
  18039. +#else
  18040. + .global __avr32_f64_add
  18041. + .type __avr32_f64_add,@function
  18042. +__avr32_f64_add:
  18043. +#endif
  18044. +
  18045. + /* op1 in {r11,r10}*/
  18046. + /* op2 in {r9,r8}*/
  18047. +
  18048. +#if defined(L_avr32_f64_addsub_fast)
  18049. + /* If op2 is zero just return op1 */
  18050. + or r12, r8, r9 << 1 /* fold op2 (sign bit discarded) into one zero test */
  18051. + reteq r12
  18052. +#endif
  18053. +
  18054. + /* Check signs */
  18055. + eor r12, r11, r9
  18056. + /* Different signs, use subtraction. */
  18057. + brmi __avr32_f64_sub_from_add
  18058. +
  18059. + stm --sp, r5, r6, r7, lr /* save callee-saved scratch registers */
  18060. +
  18061. + /* Get sign of op1 into r12 */
  18062. + mov r12, r11
  18063. + andh r12, 0x8000, COH
  18064. +
  18065. + /* Remove sign from operands */
  18066. + cbr r11, 31
  18067. + cbr r9, 31
  18068. +
  18069. + /* Put the number with the largest exponent in [r11, r10]
  18070. + and the number with the smallest exponent in [r9, r8] */
  18071. + cp r11, r9 /* high-word compare is enough to order exponents */
  18072. + brhs 1f /* Skip swap if operands already correctly ordered */
  18073. + /* Operands were not correctly ordered, swap them */
  18074. + mov r7, r11
  18075. + mov r11, r9
  18076. + mov r9, r7
  18077. + mov r7, r10
  18078. + mov r10, r8
  18079. + mov r8, r7
  18080. +1:
  18081. + mov lr, 0 /* Set sticky bits to zero */
  18082. + /* Unpack largest operand - opH */
  18083. + /* exp: r7 */
  18084. + /* sf: r11, r10 */
  18085. + bfextu R7, R11, 20, 11 /* Extract exponent */
  18086. + bfextu r11, r11, 0, 20 /* Extract mantissa */
  18087. + sbr r11, 20 /* Insert implicit bit */
  18088. +
  18089. + /* Unpack smallest operand - opL */
  18090. + /* exp: r6 */
  18091. + /* sf: r9, r8 */
  18092. + bfextu R6, R9, 20, 11 /* Extract exponent */
  18093. + breq __avr32_f64_add_op2_subnormal /* zero exponent field: zero or subnormal */
  18094. + bfextu r9, r9, 0, 20 /* Extract mantissa */
  18095. + sbr r9, 20 /* Insert implicit bit */
  18096. +
  18097. +2:
  18098. + /* Check if opH is NaN or Inf. */
  18099. + cp.w r7, 0x7ff
  18100. + breq __avr32_f64_add_opH_nan_or_inf
  18101. +
  18102. + /* Get shift amount to scale mantissa of op2. */
  18103. + rsub r6, r7 /* r6 = expH - expL */
  18104. + breq __avr32_f64_add_shift_done /* No need to shift, exponents are equal*/
  18105. +
  18106. + /* Scale mantissa [r9, r8] with amount [r6].
  18107. + Uses scratch registers [r5] and [lr].
  18108. + In IEEE mode:Must not forget the sticky bits we intend to shift out. */
  18109. + rsub r5,r6,32 /* get (32 - shift count)
  18110. + (if shift count > 32 we get a
  18111. + negative value, but that will
  18112. + work as well in the code below.) */
  18113. +
  18114. + cp.w r6,32 /* handle shifts >= 32 separately */
  18115. + brhs __avr32_f64_add_longshift
  18116. +
  18117. + /* small (<32) shift amount, both words are part of the shift
  18118. + first remember whether part that is lost contains any 1 bits ... */
  18119. + lsl lr,r8,r5 /* shift away bits that are part of
  18120. + final mantissa. only part that goes
  18121. + to lr are bits that will be lost */
  18122. +
  18123. + /* ... and now to the actual shift */
  18124. + lsl r5,r9,r5 /* get bits from msw destined for lsw*/
  18125. + lsr r8,r8,r6 /* shift down lsw of mantissa */
  18126. + lsr r9,r9,r6 /* shift down msw of mantissa */
  18127. + or r8,r5 /* combine these bits with prepared lsw*/
  18128. +
  18129. +__avr32_f64_add_shift_done: /* Mantissas aligned; add, handle carry-out, pack and round. */
  18130. + /* Now add the mantissas. */
  18131. + add r10, r8
  18132. + adc r11, r11, r9 /* 64-bit add with carry */
  18133. +
  18134. + /* Check if we overflowed. */
  18135. + bld r11, 21 /* carry out of the implicit-bit position? */
  18136. + breq __avr32_f64_add_res_of /* BUGFIX: stray trailing ':' removed - label-definition syntax is not valid in a branch-target operand. */
  18137. +
  18138. +__avr32_f64_add_res_of_done:
  18139. +
  18140. + /* Pack final result*/
  18141. + /* Input: [r7]:exp, [r11, r10]:mant, [r12]:sign in MSB */
  18142. + /* Result in [r11,r10] */
  18143. + /* Insert exponent and sign bit*/
  18144. + bfins r11, r7, 20, 11
  18145. + or r11, r12
  18146. +
  18147. + /* Round */
  18148. +__avr32_f64_add_round:
  18149. +#if defined(L_avr32_f64_addsub)
  18150. + bfextu r12, r10, 0, 1 /* Extract parity bit.*/
  18151. + or lr, r12 /* or it together with the sticky bits. */
  18152. + eorh lr, 0x8000 /* Toggle round bit. */
  18153. + /* We should now round up by adding one for the following cases:
  18154. +
  18155. + halfway sticky|parity round-up
  18156. + 0 x no
  18157. + 1 0 no
  18158. + 1 1 yes
  18159. +
  18160. + Since we have inverted the halfway bit we can use the satu instruction
  18161. + by saturating to 1 bit to implement this.
  18162. + */
  18163. + satu lr >> 0, 1
  18164. +#else
  18165. + lsr lr, 31 /* fast mode: round on the halfway bit alone */
  18166. +#endif
  18167. + add r10, lr
  18168. + acr r11 /* propagate rounding carry into high word */
  18169. +
  18170. + /* Return result in [r11,r10] */
  18171. + ldm sp++, r5, r6, r7,pc
  18172. +
  18173. +
  18174. +__avr32_f64_add_opH_nan_or_inf: /* opH exponent is all ones: NaN or Inf. */
  18175. + /* Check if opH is NaN, if so return NaN */
  18176. + cbr r11, 20 /* drop implicit bit; remaining mantissa nonzero => NaN */
  18177. + or lr, r11, r10
  18178. + brne __avr32_f64_add_return_nan
  18179. +
  18180. + /* opH is Inf. */
  18181. + /* Check if opL is Inf. or NaN */
  18182. + cp.w r6, 0x7ff
  18183. + breq __avr32_f64_add_opL_nan_or_inf
  18184. + ldm sp++, r5, r6, r7, pc/* opL not Inf or NaN, return opH */
  18185. +__avr32_f64_add_opL_nan_or_inf:
  18186. + cbr r9, 20 /* drop implicit bit of opL */
  18187. + or lr, r9, r8
  18188. + brne __avr32_f64_add_return_nan
  18189. + mov r10, 0 /* Generate Inf in r11, r10 */
  18190. + mov_imm r11, 0x7ff00000
  18191. + or r11, r12 /* Put sign bit back */
  18192. + ldm sp++, r5, r6, r7, pc/* opL Inf, return Inf */
  18193. +__avr32_f64_add_return_nan:
  18194. + mov r10, -1 /* Generate NaN in r11, r10 */
  18195. + mov r11, -1
  18196. + ldm sp++, r5, r6, r7, pc/* opL Inf or NaN, return NaN */
  18197. +
  18198. +
  18199. +__avr32_f64_add_longshift: /* Align opL mantissa when the exponent gap is >= 32 bits. */
  18200. + /* large (>=32) shift amount, only lsw will have bits left after shift.
  18201. + note that shift operations will use ((shift count=r6) mod 32) so
  18202. + we do not need to subtract 32 from shift count. */
  18203. + /* Saturate the shift amount to 63. If the amount
  18204. + is any larger op2 is insignificant. */
  18205. + satu r6 >> 0, 6
  18206. + /* If shift amount is 32 there are no bits from the msw that are lost. */
  18207. + moveq lr, r8
  18208. + breq 0f
  18209. + /* first remember whether part that is lost contains any 1 bits ... */
  18210. + lsl lr,r9,r5 /* save all lost bits from msw */
  18211. +#if defined(L_avr32_f64_addsub)
  18212. + cp.w r8, 0
  18213. + srne r8 /* collapse lost lsw to one sticky flag */
  18214. + or lr,r8 /* also save lost bits (all) from lsw
  18215. + now lr != 0 if we lose any bits */
  18216. +#endif
  18217. +0:
  18218. + /* ... and now to the actual shift */
  18219. + lsr r8,r9,r6 /* msw -> lsw and make rest of shift inside lsw*/
  18220. + mov r9,0 /* clear msw */
  18221. + rjmp __avr32_f64_add_shift_done
  18222. +
  18223. +__avr32_f64_add_res_of: /* Mantissa addition carried out of the implicit-bit position. */
  18224. + /* We overflowed. Scale down mantissa by shifting right one position. */
  18225. + or lr, lr, lr << 1 /* Remember stickybits*/
  18226. + lsr r11, 1
  18227. + ror r10
  18228. + ror lr
  18229. + sub r7, -1 /* Increment exponent */
  18230. +
  18231. + /* Clear mantissa to set result to Inf if the exponent is 255. */
  18232. + cp.w r7, 0x7ff
  18233. + moveq r10, 0
  18234. + moveq r11, 0
  18235. + moveq lr, 0
  18236. + rjmp __avr32_f64_add_res_of_done
  18237. +
  18238. +__avr32_f64_add_op2_subnormal: /* opL has a zero exponent field: zero or subnormal. */
  18239. + /* Set exponent to 1 */
  18240. + mov r6, 1
  18241. +
  18242. + /* Check if op2 is also subnormal. */
  18243. + cp.w r7, 0
  18244. + brne 2b /* opH is normal: continue on the normal path */
  18245. +
  18246. + cbr r11, 20 /* remove wrongly inserted implicit bit of opH */
  18247. + /* Both operands are subnormal. Just add the mantissas
  18248. + and the exponent will automatically be set to 1 if
  18249. + we overflow into a normal number. */
  18250. + add r10, r8
  18251. + adc r11, r11, r9
  18252. +
  18253. + /* Add sign bit */
  18254. + or r11, r12
  18255. +
  18256. + /* Return result in [r11,r10] */
  18257. + ldm sp++, r5, r6, r7,pc
  18258. +
  18259. +
  18260. +
  18261. +#endif
  18262. +
  18263. +#ifdef L_avr32_f64_to_u32
  18264. + /* This goes into L_fixdfsi */
  18265. +#endif
  18266. +
  18267. +
  18268. +#ifdef L_avr32_f64_to_s32
  18269. + .global __avr32_f64_to_u32
  18270. + .type __avr32_f64_to_u32,@function
  18271. +__avr32_f64_to_u32: /* double (r11:r10) -> unsigned 32-bit int (r12). */
  18272. + cp.w r11, 0
  18273. + retmi 0 /* Negative returns 0 */
  18274. +
  18275. + /* Fallthrough to df to signed si conversion */
  18276. + .global __avr32_f64_to_s32
  18277. + .type __avr32_f64_to_s32,@function
  18278. +__avr32_f64_to_s32: /* double (r11:r10) -> signed 32-bit int (truncating). */
  18279. + lsl r12,r11,1
  18280. + lsr r12,21 /* extract exponent*/
  18281. + sub r12,1023 /* convert to unbiased exponent.*/
  18282. + retlo 0 /* too small exponent implies zero. */
  18283. +
  18284. +1:
  18285. + rsub r12,r12,31 /* shift count = 31 - exponent
  (NOTE(review): no overflow clamp for exponent > 31 - result unspecified, as usual for out-of-range conversions) */
  18286. + mov r9,r11 /* save sign for later...*/
  18287. + lsl r11,11 /* remove exponent and sign*/
  18288. + sbr r11,31 /* add implicit bit*/
  18289. + or r11,r11,r10>>21 /* get rest of bits from lsw of double */
  18290. + lsr r11,r11,r12 /* shift down mantissa to final place */
  18291. + lsl r9,1 /* sign -> carry */
  18292. + retcc r11 /* if positive, we are done */
  18293. + neg r11 /* if negative float, negate result */
  18294. + ret r11
  18295. +
  18296. +#endif /* L_fixdfsi*/
  18297. +
  18298. +#ifdef L_avr32_f64_to_u64
  18299. + /* Actual function is in L_fixdfdi */
  18300. +#endif
  18301. +
  18302. +#ifdef L_avr32_f64_to_s64
  18303. + .global __avr32_f64_to_u64
  18304. + .type __avr32_f64_to_u64,@function
  18305. +__avr32_f64_to_u64: /* double (r11:r10) -> unsigned 64-bit int (r11:r10). */
  18306. + cp.w r11,0
  18307. + /* Negative numbers return zero */
  18308. + movmi r10, 0
  18309. + movmi r11, 0
  18310. + retmi r11
  18311. +
  18312. +
  18313. +
  18314. + /* Fallthrough */
  18315. + .global __avr32_f64_to_s64
  18316. + .type __avr32_f64_to_s64,@function
  18317. +__avr32_f64_to_s64: /* double (r11:r10) -> signed 64-bit int (truncating). */
  18318. + lsl r9,r11,1
  18319. + lsr r9,21 /* get exponent*/
  18320. + sub r9,1023 /* convert to correct range*/
  18321. + /* Return zero if exponent to small */
  18322. + movlo r10, 0
  18323. + movlo r11, 0
  18324. + retlo r11
  18325. +
  18326. + mov r8,r11 /* save sign for later...*/
  18327. +1:
  18328. + lsl r11,11 /* remove exponent */
  18329. + sbr r11,31 /* add implicit bit*/
  18330. + or r11,r11,r10>>21 /* get rest of bits from lsw of double*/
  18331. + lsl r10,11 /* align lsw correctly as well */
  18332. + rsub r9,r9,63 /* shift count = 63 - exponent */
  18333. + breq 1f /* no shift needed for exponent 63 */
  18334. +
  18335. + cp.w r9,32 /* is shift count more than one reg? */
  18336. + brhs 0f
  18337. +
  18338. + mov r12,r11 /* save msw */
  18339. + lsr r10,r10,r9 /* small shift count, shift down lsw */
  18340. + lsr r11,r11,r9 /* small shift count, shift down msw */
  18341. + rsub r9,r9,32 /* get 32-size of shifted out tail */
  18342. + lsl r12,r12,r9 /* align part to move from msw to lsw */
  18343. + or r10,r12 /* combine to get new lsw */
  18344. + rjmp 1f
  18345. +
  18346. +0:
  18347. + lsr r10,r11,r9 /* large shift count,only lsw get bits
  18348. + note that shift count is modulo 32*/
  18349. + mov r11,0 /* msw will be 0 */
  18350. +
  18351. +1:
  18352. + lsl r8,1 /* sign -> carry */
  18353. + retcc r11 /* if positive, we are done */
  18354. +
  18355. + neg r11 /* if negative float, negate result */
  18356. + neg r10 /* 64-bit negate: negate lsw ... */
  18357. + scr r11 /* ... and propagate the borrow into msw */
  18358. + ret r11
  18359. +
  18360. +#endif
  18361. +
  18362. +#ifdef L_avr32_u32_to_f64
  18363. + /* Code located in L_floatsidf */
  18364. +#endif
  18365. +
  18366. +#ifdef L_avr32_s32_to_f64
  18367. + .global __avr32_u32_to_f64
  18368. + .type __avr32_u32_to_f64,@function
  18369. +__avr32_u32_to_f64: /* unsigned 32-bit int (r12) -> double (r11:r10). */
  18370. + sub r11, r12, 0 /* Move to r11 and force Z flag to be updated */
  18371. + mov r12, 0 /* always positive */
  18372. + rjmp 0f /* Jump to common code for floatsidf */
  18373. +
  18374. + .global __avr32_s32_to_f64
  18375. + .type __avr32_s32_to_f64,@function
  18376. +__avr32_s32_to_f64: /* signed 32-bit int (r12) -> double (r11:r10). */
  18377. + mov r11, r12 /* Keep original value in r12 for sign */
  18378. + abs r11 /* Absolute value of r12 */
  18379. +0:
  18380. + mov r10,0 /* let remaining bits be zero */
  18381. + reteq r11 /* zero long will return zero float */
  18382. +
  18383. + pushm lr
  18384. + mov r9,31+1023 /* set exponent */
  18385. +
  18386. + normalize_df r9 /*exp*/, r10, r11 /* mantissa */, r8, lr /* scratch */ /* macro defined elsewhere in this patch */
  18387. +
  18388. + /* Check if a subnormal result was created */
  18389. + cp.w r9, 0
  18390. + brgt 0f
  18391. +
  18392. + adjust_subnormal_df r9 /* exp */, r10, r11 /* Mantissa */, r12 /*sign*/, r8, lr /* scratch */
  18393. + popm pc
  18394. +0:
  18395. +
  18396. + /* Round result */
  18397. + round_df r9 /*exp*/, r10, r11 /* Mantissa */, r8 /*scratch*/
  18398. + cp.w r9,0x7ff
  18399. + brlt 0f
  18400. + /*Return infinity */
  18401. + mov r10, 0
  18402. + mov_imm r11, 0xffe00000 /* becomes 0x7ff00000 after the sign shift-in below */
  18403. + rjmp __floatsidf_return_op1
  18404. +
  18405. +0:
  18406. +
  18407. + /* Pack */
  18408. + pack_df r9 /*exp*/, r10, r11 /* mantissa */, r10, r11 /* Output df number*/
  18409. +__floatsidf_return_op1:
  18410. + lsl r12,1 /* shift in sign bit */
  18411. + ror r11
  18412. +
  18413. + popm pc
  18414. +#endif
  18415. +
  18416. +
  18417. +#ifdef L_avr32_f32_cmp_eq
  18418. + .global __avr32_f32_cmp_eq
  18419. + .type __avr32_f32_cmp_eq,@function
  18420. +__avr32_f32_cmp_eq: /* float == float: returns 1 (r12) if equal, else 0. NaN compares unequal. */
  18421. + cp.w r12, r11
  18422. + breq 0f
  18423. + /* If not equal check for +/-0 */
  18424. + /* Or together the two values and shift out the sign bit.
  18425. + If the result is zero, then the two values are both zero. */
  18426. + or r12, r11
  18427. + lsl r12, 1
  18428. + reteq 1
  18429. + ret 0
  18430. +0:
  18431. + /* Numbers were equal. Check for NaN or Inf */
  18432. + mov_imm r11, 0xff000000
  18433. + lsl r12, 1
  18434. + cp.w r12, r11
  18435. + retls 1 /* 0 if NaN, 1 otherwise */
  18436. + ret 0
  18437. +#endif
  18438. +
  18439. +#if defined(L_avr32_f32_cmp_ge) || defined(L_avr32_f32_cmp_lt)
  18440. +#ifdef L_avr32_f32_cmp_ge
  18441. + .global __avr32_f32_cmp_ge
  18442. + .type __avr32_f32_cmp_ge,@function
  18443. +__avr32_f32_cmp_ge: /* float >= float: 1/0 in r12; false if either is NaN. */
  18444. +#endif
  18445. +#ifdef L_avr32_f32_cmp_lt
  18446. + .global __avr32_f32_cmp_lt
  18447. + .type __avr32_f32_cmp_lt,@function
  18448. +__avr32_f32_cmp_lt: /* float < float: 1/0 in r12; false if either is NaN. */
  18449. +#endif
  18450. + lsl r10, r12, 1 /* Remove sign bits */
  18451. + lsl r9, r11, 1
  18452. + subfeq r10, 0 /* if op1 was zero, also flag-test op0: Z now set iff both zero */
  18453. +#ifdef L_avr32_f32_cmp_ge
  18454. + reteq 1 /* Both number are zero. Return true. */
  18455. +#endif
  18456. +#ifdef L_avr32_f32_cmp_lt
  18457. + reteq 0 /* Both number are zero. Return false. */
  18458. +#endif
  18459. + mov_imm r8, 0xff000000
  18460. + cp.w r10, r8
  18461. + rethi 0 /* Op0 is NaN */
  18462. + cp.w r9, r8
  18463. + rethi 0 /* Op1 is Nan */
  18464. +
  18465. + eor r8, r11, r12 /* N flag set iff signs differ */
  18466. + bld r12, 31 /* sign of op0 -> C */
  18467. +#ifdef L_avr32_f32_cmp_ge
  18468. + srcc r8 /* Set result to true if op0 is positive*/
  18469. +#endif
  18470. +#ifdef L_avr32_f32_cmp_lt
  18471. + srcs r8 /* Set result to true if op0 is negative*/
  18472. +#endif
  18473. + retmi r8 /* Return if signs are different */
  18474. + brcs 0f /* Both signs negative? */
  18475. +
  18476. + /* Both signs positive */
  18477. + cp.w r12, r11
  18478. +#ifdef L_avr32_f32_cmp_ge
  18479. + reths 1
  18480. + retlo 0
  18481. +#endif
  18482. +#ifdef L_avr32_f32_cmp_lt
  18483. + reths 0
  18484. + retlo 1
  18485. +#endif
  18486. +0:
  18487. + /* Both signs negative */
  18488. + cp.w r11, r12 /* magnitude order reverses for negative numbers */
  18489. +#ifdef L_avr32_f32_cmp_ge
  18490. + reths 1
  18491. + retlo 0
  18492. +#endif
  18493. +#ifdef L_avr32_f32_cmp_lt
  18494. + reths 0
  18495. + retlo 1
  18496. +#endif
  18497. +#endif
  18498. +
  18499. +
  18500. +#ifdef L_avr32_f64_cmp_eq
  18501. + .global __avr32_f64_cmp_eq
  18502. + .type __avr32_f64_cmp_eq,@function
  18503. +__avr32_f64_cmp_eq: /* double == double: 1/0 in r12; NaN compares unequal, +/-0 equal. */
  18504. + cp.w r10,r8
  18505. + cpc r11,r9 /* 64-bit compare */
  18506. + breq 0f
  18507. +
  18508. + /* Args were not equal*/
  18509. + /* Both args could be zero with different sign bits */
  18510. + lsl r11,1 /* get rid of sign bits */
  18511. + lsl r9,1
  18512. + or r11,r10 /* Check if all bits are zero */
  18513. + or r11,r9
  18514. + or r11,r8
  18515. + reteq 1 /* If all zeros the arguments are equal
  18516. + so return 1 else return 0 */
  18517. + ret 0
  18518. +0:
  18519. + /* check for NaN */
  18520. + lsl r11,1
  18521. + mov_imm r12, 0xffe00000
  18522. + cp.w r10,0
  18523. + cpc r11,r12 /* check if nan or inf */
  18524. + retls 1 /* not NaN (<= Inf pattern): bitwise-equal args are equal, return 1 */
  18525. + ret 0 /* NaN: return 0 */
  18526. +
  18527. +#endif
  18528. +
  18529. +
  18530. +#if defined(L_avr32_f64_cmp_ge) || defined(L_avr32_f64_cmp_lt)
  18531. +
  18532. +#ifdef L_avr32_f64_cmp_ge
  18533. + .global __avr32_f64_cmp_ge
  18534. + .type __avr32_f64_cmp_ge,@function
  18535. +__avr32_f64_cmp_ge: /* double >= double: 1/0 in r12; false if either is NaN. */
  18536. +#endif
  18537. +#ifdef L_avr32_f64_cmp_lt
  18538. + .global __avr32_f64_cmp_lt
  18539. + .type __avr32_f64_cmp_lt,@function
  18540. +__avr32_f64_cmp_lt: /* double < double: 1/0 in r12; false if either is NaN. */
  18541. +#endif
  18542. +
  18543. + /* compare magnitude of op1 and op2 */
  18544. + st.w --sp, lr
  18545. + st.w --sp, r7
  18546. + lsl r11,1 /* Remove sign bit of op1 */
  18547. + srcs r12 /* Sign op1 to lsb of r12*/
  18548. + lsl r9,1 /* Remove sign bit of op2 */
  18549. + srcs r7 /* Sign op2 kept in r7 as well */
  18550. + rol r12 /* Sign op2 to lsb of r12, sign op1 to bit 1 of r12 */
  18551. +
  18552. +
  18553. + /* Check for Nan */
  18554. + mov_imm lr, 0xffe00000
  18555. + cp.w r10,0
  18556. + cpc r11,lr
  18557. + brhi 0f /* We have NaN */
  18558. + cp.w r8,0
  18559. + cpc r9,lr
  18560. + brhi 0f /* We have NaN */
  18561. +
  18562. + cp.w r11, 0
  18563. + subfeq r10, 0 /* Z set iff op1 is +/-0 */
  18564. + breq 3f /* op1 zero */
  18565. + ld.w r7, sp++
  18566. + ld.w lr, sp++
  18567. +
  18568. + cp.w r12,3 /* both operands negative ?*/
  18569. + breq 1f
  18570. +
  18571. + cp.w r12,1 /* both operands positive? */
  18572. + brlo 2f
  18573. +
  18574. + /* Different signs. If sign of op1 is negative the difference
  18575. + between op1 and op2 will always be negative, and if op1 is
  18576. + positive the difference will always be positive */
  18577. +#ifdef L_avr32_f64_cmp_ge
  18578. + reteq 1
  18579. + retne 0
  18580. +#endif
  18581. +#ifdef L_avr32_f64_cmp_lt
  18582. + reteq 0
  18583. + retne 1
  18584. +#endif
  18585. +
  18586. +2:
  18587. + /* Both operands positive. Just compute the difference */
  18588. + cp.w r10,r8
  18589. + cpc r11,r9
  18590. +#ifdef L_avr32_f64_cmp_ge
  18591. + reths 1
  18592. + retlo 0
  18593. +#endif
  18594. +#ifdef L_avr32_f64_cmp_lt
  18595. + reths 0
  18596. + retlo 1
  18597. +#endif
  18598. +
  18599. +1:
  18600. + /* Both operands negative. Compute the difference with operands switched */
  18601. + cp r8,r10
  18602. + cpc r9,r11
  18603. +#ifdef L_avr32_f64_cmp_ge
  18604. + reths 1
  18605. + retlo 0
  18606. +#endif
  18607. +#ifdef L_avr32_f64_cmp_lt
  18608. + reths 0
  18609. + retlo 1
  18610. +#endif
  18611. +
  18612. +0:
  18613. + ld.w r7, sp++ /* NaN: unordered, both ge and lt are false */
  18614. + popm pc, r12=0
  18615. +
  18616. +3:
  18617. + cp.w r7, 1 /* Check sign bit from r9 */
  18618. +#ifdef L_avr32_f64_cmp_ge
  18619. + sreq r12 /* If op2 is negative then op1 >= op2. */
  18620. +#endif
  18621. +#ifdef L_avr32_f64_cmp_lt
  18622. + srne r12 /* If op2 is positve then op1 <= op2. */
  18623. +#endif
  18624. + cp.w r9, 0
  18625. + subfeq r8, 0 /* Z set iff op2 is +/-0 too */
  18626. + ld.w r7, sp++
  18627. + ld.w lr, sp++
  18628. +#ifdef L_avr32_f64_cmp_ge
  18629. + reteq 1 /* Both operands are zero. Return true. */
  18630. +#endif
  18631. +#ifdef L_avr32_f64_cmp_lt
  18632. + reteq 0 /* Both operands are zero. Return false. */
  18633. +#endif
  18634. + ret r12
  18635. +#endif
  18636. +
  18637. +#if defined(L_avr32_f64_div) || defined(L_avr32_f64_div_fast)
  18638. + .align 2
  18639. +
  18640. +#if defined(L_avr32_f64_div_fast)
  18641. + .global __avr32_f64_div_fast
  18642. + .type __avr32_f64_div_fast,@function
  18643. +__avr32_f64_div_fast:
  18644. +#else
  18645. + .global __avr32_f64_div
  18646. + .type __avr32_f64_div,@function
  18647. +__avr32_f64_div:
  18648. +#endif
  18649. + stm --sp, r0, r1, r2, r3, r4, r5, r6, r7,lr
  18650. + /* op1 in {r11,r10}*/
  18651. + /* op2 in {r9,r8}*/
  18652. + eor lr, r11, r9 /* MSB(lr) = Sign(op1) ^ Sign(op2) */
  18653. +
  18654. +
  18655. + /* Unpack op1 to 2.62 format*/
  18656. + /* exp: r7 */
  18657. + /* sf: r11, r10 */
  18658. + lsr r7, r11, 20 /* Extract exponent */
  18659. +
  18660. + lsl r11, 9 /* Extract mantissa, leave room for implicit bit */
  18661. + or r11, r11, r10>>23
  18662. + lsl r10, 9
  18663. + sbr r11, 29 /* Insert implicit bit */
  18664. + andh r11, 0x3fff /*Mask last part of exponent since we use 2.62 format*/
  18665. +
  18666. + cbr r7, 11 /* Clear sign bit */
  18667. + /* Check if normalization is needed */
  18668. + breq 11f /*If number is subnormal, normalize it */
  18669. +22:
  18670. + cp r7, 0x7ff
  18671. + brge 2f /* Check op1 for NaN or Inf */
  18672. +
  18673. + /* Unpack op2 to 2.62 format*/
  18674. + /* exp: r6 */
  18675. + /* sf: r9, r8 */
  18676. + lsr r6, r9, 20 /* Extract exponent */
  18677. +
  18678. + lsl r9, 9 /* Extract mantissa, leave room for implicit bit */
  18679. + or r9, r9, r8>>23
  18680. + lsl r8, 9
  18681. + sbr r9, 29 /* Insert implicit bit */
  18682. + andh r9, 0x3fff /*Mask last part of exponent since we use 2.62 format*/
  18683. +
  18684. + cbr r6, 11 /* Clear sign bit */
  18685. + /* Check if normalization is needed */
  18686. + breq 13f /*If number is subnormal, normalize it */
  18687. +23:
  18688. + cp r6, 0x7ff
  18689. + brge 3f /* Check op2 for NaN or Inf */
  18690. +
  18691. + /* Calculate new exponent */
  18692. + sub r7, r6
  18693. + sub r7,-1023
  18694. +
  18695. + /* Divide */
  18696. + /* Approximating 1/d with the following recurrence: */
  18697. + /* R[j+1] = R[j]*(2-R[j]*d) */
  18698. + /* Using 2.62 format */
  18699. + /* TWO: r12 */
  18700. + /* d = op2 = divisor (2.62 format): r9,r8 */
  18701. + /* Multiply result : r5, r4 */
  18702. + /* Initial guess : r3, r2 */
  18703. + /* New approximations : r3, r2 */
  18704. + /* op1 = Dividend (2.62 format) : r11, r10 */
  18705. +
  18706. + mov_imm r12, 0x80000000
  18707. +
  18708. + /* Load initial guess, using look-up table */
  18709. + /* Initial guess is of format 01.XY, where XY is constructed as follows: */
  18710. + /* Let d be of following format: 00.1xy....., then XY=~xy */
  18711. + /* For d=00.100 = 0,5 -> initial guess=01.11 = 1,75 */
  18712. + /* For d=00.101 = 0,625 -> initial guess=01.11 = 1,5 */
  18713. + /* For d=00.110 = 0,75 -> initial guess=01.11 = 1,25 */
  18714. + /* For d=00.111 = 0,875 -> initial guess=01.11 = 1,0 */
  18715. + /* r2 is also part of the reg pair forming initial guess, but it*/
  18716. + /* is kept uninitialized to save one cycle since it has so low significance*/
  18717. +
  18718. + lsr r3, r12, 1
  18719. + bfextu r4, r9, 27, 2
  18720. + com r4
  18721. + bfins r3, r4, 28, 2
  18722. +
  18723. + /* First approximation */
  18724. + /* Approximating to 32 bits */
  18725. + /* r5 = R[j]*d */
  18726. + mulu.d r4, r3, r9
  18727. + /* r5 = 2-R[j]*d */
  18728. + sub r5, r12, r5<<2
  18729. + /* r3 = R[j]*(2-R[j]*d) */
  18730. + mulu.d r4, r3, r5
  18731. + lsl r3, r5, 2
  18732. +
  18733. + /* Second approximation */
  18734. + /* Approximating to 32 bits */
  18735. + /* r5 = R[j]*d */
  18736. + mulu.d r4, r3, r9
  18737. + /* r5 = 2-R[j]*d */
  18738. + sub r5, r12, r5<<2
  18739. + /* r3 = R[j]*(2-R[j]*d) */
  18740. + mulu.d r4, r3, r5
  18741. + lsl r3, r5, 2
  18742. +
  18743. + /* Third approximation */
  18744. + /* Approximating to 32 bits */
  18745. + /* r5 = R[j]*d */
  18746. + mulu.d r4, r3, r9
  18747. + /* r5 = 2-R[j]*d */
  18748. + sub r5, r12, r5<<2
  18749. + /* r3 = R[j]*(2-R[j]*d) */
  18750. + mulu.d r4, r3, r5
  18751. + lsl r3, r5, 2
  18752. +
  18753. + /* Fourth approximation */
  18754. + /* Approximating to 64 bits */
  18755. + /* r5,r4 = R[j]*d */
  18756. + mul_approx_df r3 /*ah*/, r2 /*al*/, r9 /*bh*/, r8 /*bl*/, r5 /*rh*/, r4 /*rl*/, r1 /*sh*/, r0 /*sl*/
  18757. + lsl r5, 2
  18758. + or r5, r5, r4>>30
  18759. + lsl r4, 2
  18760. + /* r5,r4 = 2-R[j]*d */
  18761. + neg r4
  18762. + sbc r5, r12, r5
  18763. + /* r3,r2 = R[j]*(2-R[j]*d) */
  18764. + mul_approx_df r3 /*ah*/, r2 /*al*/, r5 /*bh*/, r4 /*bl*/, r5 /*rh*/, r4 /*rl*/, r1 /*sh*/, r0 /*sl*/
  18765. + lsl r3, r5, 2
  18766. + or r3, r3, r4>>30
  18767. + lsl r2, r4, 2
  18768. +
  18769. +
  18770. + /* Fifth approximation */
  18771. + /* Approximating to 64 bits */
  18772. + /* r5,r4 = R[j]*d */
  18773. + mul_approx_df r3 /*ah*/, r2 /*al*/, r9 /*bh*/, r8 /*bl*/, r5 /*rh*/, r4 /*rl*/, r1 /*sh*/, r0 /*sl*/
  18774. + lsl r5, 2
  18775. + or r5, r5, r4>>30
  18776. + lsl r4, 2
  18777. + /* r5,r4 = 2-R[j]*d */
  18778. + neg r4
  18779. + sbc r5, r12, r5
  18780. + /* r3,r2 = R[j]*(2-R[j]*d) */
  18781. + mul_approx_df r3 /*ah*/, r2 /*al*/, r5 /*bh*/, r4 /*bl*/, r5 /*rh*/, r4 /*rl*/, r1 /*sh*/, r0 /*sl*/
  18782. + lsl r3, r5, 2
  18783. + or r3, r3, r4>>30
  18784. + lsl r2, r4, 2
  18785. +
  18786. +
  18787. + /* Multiply with dividend to get quotient */
  18788. + mul_approx_df r3 /*ah*/, r2 /*al*/, r11 /*bh*/, r10 /*bl*/, r3 /*rh*/, r2 /*rl*/, r1 /*sh*/, r0 /*sl*/
  18789. +
  18790. +
  18791. + /* To increase speed, this result is not corrected before final rounding.*/
  18792. + /* This may give a difference to IEEE compliant code of 1 ULP.*/
  18793. +
  18794. +
  18795. + /* Adjust exponent and mantissa */
  18796. + /* r7:exp, [r3, r2]:mant, [r5, r4]:scratch*/
  18797. + /* Mantissa may be of the format 0.xxxx or 1.xxxx. */
  18798. + /* In the first case, shift one pos to left.*/
  18799. + bld r3, 31-3
  18800. + breq 0f
  18801. + lsl r2, 1
  18802. + rol r3
  18803. + sub r7, 1
  18804. +#if defined(L_avr32_f64_div)
  18805. + /* We must scale down the dividend to 5.59 format. */
  18806. + lsr r10, 3
  18807. + or r10, r10, r11 << 29
  18808. + lsr r11, 3
  18809. + rjmp 1f
  18810. +#endif
  18811. +0:
  18812. +#if defined(L_avr32_f64_div)
  18813. + /* We must scale down the dividend to 6.58 format. */
  18814. + lsr r10, 4
  18815. + or r10, r10, r11 << 28
  18816. + lsr r11, 4
  18817. +1:
  18818. +#endif
  18819. + cp r7, 0
  18820. + brle __avr32_f64_div_res_subnormal /* Result was subnormal. */
  18821. +
  18822. +
  18823. +#if defined(L_avr32_f64_div)
  18824. + /* In order to round correctly we calculate the remainder:
  18825. + Remainder = dividend[11:r10] - divisor[r9:r8]*quotient[r3:r2]
  18826. + for the case when the quotient is halfway between the round-up
  18827. + value and the round down value. If the remainder then is negative
  18828. + it means that the quotient was to big and that it should not be
  18829. + rounded up, if the remainder is positive the quotient was to small
  18830. + and we need to round up. If the remainder is zero it means that the
  18831. + quotient is exact but since we need to remove the guard bit we should
  18832. + round to even. */
  18833. +
  18834. + /* Truncate and add guard bit. */
  18835. + andl r2, 0xff00
  18836. + orl r2, 0x0080
  18837. +
  18838. +
  18839. + /* Now do the multiplication. The quotient has the format 4.60
  18840. + while the divisor has the format 2.62 which gives a result
  18841. + of 6.58 */
  18842. + mulu.d r0, r3, r8
  18843. + macu.d r0, r2, r9
  18844. + mulu.d r4, r2, r8
  18845. + mulu.d r8, r3, r9
  18846. + add r5, r0
  18847. + adc r8, r8, r1
  18848. + acr r9
  18849. +
  18850. +
  18851. + /* Check if remainder is positive, negative or equal. */
  18852. + bfextu r12, r2, 8, 1 /* Get parity bit into bit 0 of r0 */
  18853. + cp r4, 0
  18854. + cpc r5
  18855. +__avr32_f64_div_round_subnormal:
  18856. + cpc r8, r10
  18857. + cpc r9, r11
  18858. + srlo r6 /* Remainder positive: we need to round up.*/
  18859. + moveq r6, r12 /* Remainder zero: round up if mantissa odd. */
  18860. +#else
  18861. + bfextu r6, r2, 7, 1 /* Get guard bit */
  18862. +#endif
  18863. + /* Final packing, scale down mantissa. */
  18864. + lsr r10, r2, 8
  18865. + or r10, r10, r3<<24
  18866. + lsr r11, r3, 8
  18867. + /* Insert exponent and sign bit*/
  18868. + bfins r11, r7, 20, 11
  18869. + bld lr, 31
  18870. + bst r11, 31
  18871. +
  18872. + /* Final rounding */
  18873. + add r10, r6
  18874. + acr r11
  18875. +
  18876. + /* Return result in [r11,r10] */
  18877. + ldm sp++, r0, r1, r2, r3, r4, r5, r6, r7,pc
  18878. +
  18879. +
  18880. +2:
  18881. + /* Op1 is NaN or inf */
  18882. + andh r11, 0x000f /* Extract mantissa */
  18883. + or r11, r10
  18884. + brne 16f /* Return NaN if op1 is NaN */
  18885. + /* Op1 is inf check op2 */
  18886. + lsr r6, r9, 20 /* Extract exponent */
  18887. + cbr r6, 11 /* Clear sign bit */
  18888. + cp r6, 0x7ff
  18889. + brne 17f /* Inf/number gives inf, return inf */
  18890. + rjmp 16f /* The rest gives NaN*/
  18891. +
  18892. +3:
  18893. + /* Op1 is a valid number. Op 2 is NaN or inf */
  18894. + andh r9, 0x000f /* Extract mantissa */
  18895. + or r9, r8
  18896. + brne 16f /* Return NaN if op2 is NaN */
  18897. + rjmp 15f /* Op2 was inf, return zero*/
  18898. +
  18899. +11: /* Op1 was denormal. Fix it. */
  18900. + lsl r11, 3
  18901. + or r11, r11, r10 >> 29
  18902. + lsl r10, 3
  18903. + /* Check if op1 is zero. */
  18904. + or r4, r10, r11
  18905. + breq __avr32_f64_div_op1_zero
  18906. + normalize_df r7 /*exp*/, r10, r11 /*Mantissa*/, r4, r5 /*scratch*/
  18907. + lsr r10, 2
  18908. + or r10, r10, r11 << 30
  18909. + lsr r11, 2
  18910. + rjmp 22b
  18911. +
  18912. +
  18913. +13: /* Op2 was denormal. Fix it */
  18914. + lsl r9, 3
  18915. + or r9, r9, r8 >> 29
  18916. + lsl r8, 3
  18917. + /* Check if op2 is zero. */
  18918. + or r4, r9, r8
  18919. + breq 17f /* Divisor is zero -> return Inf */
  18920. + normalize_df r6 /*exp*/, r8, r9 /*Mantissa*/, r4, r5 /*scratch*/
  18921. + lsr r8, 2
  18922. + or r8, r8, r9 << 30
  18923. + lsr r9, 2
  18924. + rjmp 23b
  18925. +
  18926. +
  18927. +__avr32_f64_div_res_subnormal:/* Divide result was subnormal. */
  18928. +#if defined(L_avr32_f64_div)
  18929. + /* Check how much we must scale down the mantissa. */
  18930. + neg r7
  18931. + sub r7, -1 /* We do no longer have an implicit bit. */
  18932. + satu r7 >> 0, 6 /* Saturate shift amount to max 63. */
  18933. + cp.w r7, 32
  18934. + brge 0f
  18935. + /* Shift amount <32 */
  18936. + /* Scale down quotient */
  18937. + rsub r6, r7, 32
  18938. + lsr r2, r2, r7
  18939. + lsl r12, r3, r6
  18940. + or r2, r12
  18941. + lsr r3, r3, r7
  18942. + /* Scale down the dividend to match the scaling of the quotient. */
  18943. + lsl r1, r10, r6
  18944. + lsr r10, r10, r7
  18945. + lsl r12, r11, r6
  18946. + or r10, r12
  18947. + lsr r11, r11, r7
  18948. + mov r0, 0
  18949. + rjmp 1f
  18950. +0:
  18951. + /* Shift amount >=32 */
  18952. + rsub r6, r7, 32
  18953. + moveq r0, 0
  18954. + moveq r12, 0
  18955. + breq 0f
  18956. + lsl r0, r10, r6
  18957. + lsl r12, r11, r6
  18958. +0:
  18959. + lsr r2, r3, r7
  18960. + mov r3, 0
  18961. + /* Scale down the dividend to match the scaling of the quotient. */
  18962. + lsr r1, r10, r7
  18963. + or r1, r12
  18964. + lsr r10, r11, r7
  18965. + mov r11, 0
  18966. +1:
  18967. + /* Start performing the same rounding as done for normal numbers
  18968. + but this time we have scaled the quotient and dividend and hence
  18969. + need a little different comparison. */
  18970. + /* Truncate and add guard bit. */
  18971. + andl r2, 0xff00
  18972. + orl r2, 0x0080
  18973. +
  18974. + /* Now do the multiplication. */
  18975. + mulu.d r6, r3, r8
  18976. + macu.d r6, r2, r9
  18977. + mulu.d r4, r2, r8
  18978. + mulu.d r8, r3, r9
  18979. + add r5, r6
  18980. + adc r8, r8, r7
  18981. + acr r9
  18982. +
  18983. + /* Set exponent to 0 */
  18984. + mov r7, 0
  18985. +
  18986. + /* Check if remainder is positive, negative or equal. */
  18987. + bfextu r12, r2, 8, 1 /* Get parity bit into bit 0 of r0 */
  18988. + cp r4, r0
  18989. + cpc r5, r1
  18990. + /* Now the rest of the rounding is the same as for normals. */
  18991. + rjmp __avr32_f64_div_round_subnormal
  18992. +
  18993. +#endif
  18994. +15:
  18995. + /* Flush to zero for the fast version. */
  18996. + mov r11, lr /*Get correct sign*/
  18997. + andh r11, 0x8000, COH
  18998. + mov r10, 0
  18999. + ldm sp++, r0, r1, r2, r3, r4, r5, r6, r7,pc
  19000. +
  19001. +16: /* Return NaN. */
  19002. + mov r11, -1
  19003. + mov r10, 0
  19004. + ldm sp++, r0, r1, r2, r3, r4, r5, r6, r7,pc
  19005. +
  19006. +17:
  19007. + /* Check if op1 is zero. */
  19008. + or r4, r10, r11
  19009. + breq __avr32_f64_div_op1_zero
  19010. + /* Return INF. */
  19011. + mov r11, lr /*Get correct sign*/
  19012. + andh r11, 0x8000, COH
  19013. + orh r11, 0x7ff0
  19014. + mov r10, 0
  19015. + ldm sp++, r0, r1, r2, r3, r4, r5, r6, r7,pc
  19016. +
  19017. +__avr32_f64_div_op1_zero:
  19018. + or r5, r8, r9 << 1
  19019. + breq 16b /* 0.0/0.0 -> NaN */
  19020. + bfextu r4, r9, 20, 11
  19021. + cp r4, 0x7ff
  19022. + brne 15b /* Return zero */
  19023. + /* Check if divisor is Inf or NaN */
  19024. + or r5, r8, r9 << 12
  19025. + breq 15b /* Divisor is inf -> return zero */
  19026. + rjmp 16b /* Return NaN */
  19027. +
  19028. +
  19029. +
  19030. +
  19031. +#endif
  19032. +
  19033. +#if defined(L_avr32_f32_addsub) || defined(L_avr32_f32_addsub_fast)
  19034. +
  19035. + .align 2
  19036. +__avr32_f32_sub_from_add:
  19037. + /* Switch sign on op2 */
  19038. + eorh r11, 0x8000
  19039. +
  19040. +#if defined(L_avr32_f32_addsub_fast)
  19041. + .global __avr32_f32_sub_fast
  19042. + .type __avr32_f32_sub_fast,@function
  19043. +__avr32_f32_sub_fast:
  19044. +#else
  19045. + .global __avr32_f32_sub
  19046. + .type __avr32_f32_sub,@function
  19047. +__avr32_f32_sub:
  19048. +#endif
  19049. +
  19050. + /* Check signs */
  19051. + eor r8, r11, r12
  19052. + /* Different signs, use subtraction. */
  19053. + brmi __avr32_f32_add_from_sub
  19054. +
  19055. + /* Get sign of op1 */
  19056. + mov r8, r12
  19057. + andh r12, 0x8000, COH
  19058. +
  19059. + /* Remove sign from operands */
  19060. + cbr r11, 31
  19061. +#if defined(L_avr32_f32_addsub_fast)
  19062. + reteq r8 /* If op2 is zero return op1 */
  19063. +#endif
  19064. + cbr r8, 31
  19065. +
  19066. + /* Put the number with the largest exponent in r10
  19067. + and the number with the smallest exponent in r9 */
  19068. + max r10, r8, r11
  19069. + min r9, r8, r11
  19070. + cp r10, r8 /*If largest operand (in R10) is not equal to op1*/
  19071. + subne r12, 1 /* Subtract 1 from sign, which will invert MSB of r12*/
  19072. + andh r12, 0x8000, COH /*Mask all but MSB*/
  19073. +
  19074. + /* Unpack exponent and mantissa of op1 */
  19075. + lsl r8, r10, 8
  19076. + sbr r8, 31 /* Set implicit bit. */
  19077. + lsr r10, 23
  19078. +
  19079. + /* op1 is NaN or Inf. */
  19080. + cp.w r10, 0xff
  19081. + breq __avr32_f32_sub_op1_nan_or_inf
  19082. +
  19083. + /* Unpack exponent and mantissa of op2 */
  19084. + lsl r11, r9, 8
  19085. + sbr r11, 31 /* Set implicit bit. */
  19086. + lsr r9, 23
  19087. +
  19088. +#if defined(L_avr32_f32_addsub)
  19089. + /* Keep sticky bit for correct IEEE rounding */
  19090. + st.w --sp, r12
  19091. +
  19092. + /* op2 is either zero or subnormal. */
  19093. + breq __avr32_f32_sub_op2_subnormal
  19094. +0:
  19095. + /* Get shift amount to scale mantissa of op2. */
  19096. + sub r12, r10, r9
  19097. +
  19098. + breq __avr32_f32_sub_shift_done
  19099. +
  19100. + /* Saturate the shift amount to 31. If the amount
  19101. + is any larger op2 is insignificant. */
  19102. + satu r12 >> 0, 5
  19103. +
  19104. + /* Put the remaining bits into r9.*/
  19105. + rsub r9, r12, 32
  19106. + lsl r9, r11, r9
  19107. +
  19108. + /* If the remaining bits are non-zero then we must subtract one
  19109. + more from opL. */
  19110. + subne r8, 1
  19111. + srne r9 /* LSB of r9 represents sticky bits. */
  19112. +
  19113. + /* Shift mantissa of op2 to same decimal point as the mantissa
  19114. + of op1. */
  19115. + lsr r11, r11, r12
  19116. +
  19117. +
  19118. +__avr32_f32_sub_shift_done:
  19119. + /* Now subtract the mantissas. */
  19120. + sub r8, r11
  19121. +
  19122. + ld.w r12, sp++
  19123. +
  19124. + /* Normalize resulting mantissa. */
  19125. + clz r11, r8
  19126. +
  19127. + retcs 0
  19128. + lsl r8, r8, r11
  19129. + sub r10, r11
  19130. + brle __avr32_f32_sub_subnormal_result
  19131. +
  19132. + /* Insert the bits we will remove from the mantissa into r9[31:24] */
  19133. + or r9, r9, r8 << 24
  19134. +#else
  19135. + /* Ignore sticky bit to simplify and speed up rounding */
  19136. + /* op2 is either zero or subnormal. */
  19137. + breq __avr32_f32_sub_op2_subnormal
  19138. +0:
  19139. + /* Get shift amount to scale mantissa of op2. */
  19140. + rsub r9, r10
  19141. +
  19142. + /* Saturate the shift amount to 31. If the amount
  19143. + is any larger op2 is insignificant. */
  19144. + satu r9 >> 0, 5
  19145. +
  19146. + /* Shift mantissa of op2 to same decimal point as the mantissa
  19147. + of op1. */
  19148. + lsr r11, r11, r9
  19149. +
  19150. + /* Now subtract the mantissas. */
  19151. + sub r8, r11
  19152. +
  19153. + /* Normalize resulting mantissa. */
  19154. + clz r9, r8
  19155. + retcs 0
  19156. + lsl r8, r8, r9
  19157. + sub r10, r9
  19158. + brle __avr32_f32_sub_subnormal_result
  19159. +#endif
  19160. +
  19161. + /* Pack result. */
  19162. + or r12, r12, r8 >> 8
  19163. + bfins r12, r10, 23, 8
  19164. +
  19165. + /* Round */
  19166. +__avr32_f32_sub_round:
  19167. +#if defined(L_avr32_f32_addsub)
  19168. + mov_imm r10, 0x80000000
  19169. + bld r12, 0
  19170. + subne r10, -1
  19171. + cp.w r9, r10
  19172. + subhs r12, -1
  19173. +#else
  19174. + bld r8, 7
  19175. + acr r12
  19176. +#endif
  19177. +
  19178. + ret r12
  19179. +
  19180. +
  19181. +__avr32_f32_sub_op2_subnormal:
  19182. + /* Fix implicit bit and adjust exponent of subnormals. */
  19183. + cbr r11, 31
  19184. + /* Set exponent to 1 if we do not have a zero. */
  19185. + movne r9,1
  19186. +
  19187. + /* Check if op1 is also subnormal. */
  19188. + cp.w r10, 0
  19189. + brne 0b
  19190. +
  19191. + cbr r8, 31
  19192. + /* If op1 is not zero set exponent to 1. */
  19193. + movne r10,1
  19194. +
  19195. + rjmp 0b
  19196. +
  19197. +__avr32_f32_sub_op1_nan_or_inf:
  19198. + /* Check if op1 is NaN, if so return NaN */
  19199. + lsl r11, r8, 1
  19200. + retne -1
  19201. +
  19202. + /* op1 is Inf. */
  19203. + bfins r12, r10, 23, 8 /* Generate Inf in r12 */
  19204. +
  19205. + /* Check if op2 is Inf. or NaN */
  19206. + lsr r11, r9, 23
  19207. + cp.w r11, 0xff
  19208. + retne r12 /* op2 not Inf or NaN, return op1 */
  19209. +
  19210. + ret -1 /* op2 Inf or NaN, return NaN */
  19211. +
  19212. +__avr32_f32_sub_subnormal_result:
  19213. + /* Check if the number is so small that
  19214. + it will be represented with zero. */
  19215. + rsub r10, r10, 9
  19216. + rsub r11, r10, 32
  19217. + retcs 0
  19218. +
  19219. + /* Shift the mantissa into the correct position.*/
  19220. + lsr r10, r8, r10
  19221. + /* Add sign bit. */
  19222. + or r12, r10
  19223. +
  19224. + /* Put the shifted out bits in the most significant part
  19225. + of r8. */
  19226. + lsl r8, r8, r11
  19227. +
  19228. +#if defined(L_avr32_f32_addsub)
  19229. + /* Add all the remainder bits used for rounding into r9 */
  19230. + or r9, r8
  19231. +#else
  19232. + lsr r8, 24
  19233. +#endif
  19234. + rjmp __avr32_f32_sub_round
  19235. +
  19236. +
  19237. + .align 2
  19238. +
  19239. +__avr32_f32_add_from_sub:
  19240. + /* Switch sign on op2 */
  19241. + eorh r11, 0x8000
  19242. +
  19243. +#if defined(L_avr32_f32_addsub_fast)
  19244. + .global __avr32_f32_add_fast
  19245. + .type __avr32_f32_add_fast,@function
  19246. +__avr32_f32_add_fast:
  19247. +#else
  19248. + .global __avr32_f32_add
  19249. + .type __avr32_f32_add,@function
  19250. +__avr32_f32_add:
  19251. +#endif
  19252. +
  19253. + /* Check signs */
  19254. + eor r8, r11, r12
  19255. + /* Different signs, use subtraction. */
  19256. + brmi __avr32_f32_sub_from_add
  19257. +
  19258. + /* Get sign of op1 */
  19259. + mov r8, r12
  19260. + andh r12, 0x8000, COH
  19261. +
  19262. + /* Remove sign from operands */
  19263. + cbr r11, 31
  19264. +#if defined(L_avr32_f32_addsub_fast)
  19265. + reteq r8 /* If op2 is zero return op1 */
  19266. +#endif
  19267. + cbr r8, 31
  19268. +
  19269. + /* Put the number with the largest exponent in r10
  19270. + and the number with the smallest exponent in r9 */
  19271. + max r10, r8, r11
  19272. + min r9, r8, r11
  19273. +
  19274. + /* Unpack exponent and mantissa of op1 */
  19275. + lsl r8, r10, 8
  19276. + sbr r8, 31 /* Set implicit bit. */
  19277. + lsr r10, 23
  19278. +
  19279. + /* op1 is NaN or Inf. */
  19280. + cp.w r10, 0xff
  19281. + breq __avr32_f32_add_op1_nan_or_inf
  19282. +
  19283. + /* Unpack exponent and mantissa of op2 */
  19284. + lsl r11, r9, 8
  19285. + sbr r11, 31 /* Set implicit bit. */
  19286. + lsr r9, 23
  19287. +
  19288. +#if defined(L_avr32_f32_addsub)
  19289. + /* op2 is either zero or subnormal. */
  19290. + breq __avr32_f32_add_op2_subnormal
  19291. +0:
  19292. + /* Keep sticky bit for correct IEEE rounding */
  19293. + st.w --sp, r12
  19294. +
  19295. + /* Get shift amount to scale mantissa of op2. */
  19296. + rsub r9, r10
  19297. +
  19298. + /* Saturate the shift amount to 31. If the amount
  19299. + is any larger op2 is insignificant. */
  19300. + satu r9 >> 0, 5
  19301. +
  19302. + /* Shift mantissa of op2 to same decimal point as the mantissa
  19303. + of op1. */
  19304. + lsr r12, r11, r9
  19305. +
  19306. + /* Put the remainding bits into r11[23:..].*/
  19307. + rsub r9, r9, (32-8)
  19308. + lsl r11, r11, r9
  19309. + /* Insert the bits we will remove from the mantissa into r11[31:24] */
  19310. + bfins r11, r12, 24, 8
  19311. +
  19312. + /* Now add the mantissas. */
  19313. + add r8, r12
  19314. +
  19315. + ld.w r12, sp++
  19316. +#else
  19317. + /* Ignore sticky bit to simplify and speed up rounding */
  19318. + /* op2 is either zero or subnormal. */
  19319. + breq __avr32_f32_add_op2_subnormal
  19320. +0:
  19321. + /* Get shift amount to scale mantissa of op2. */
  19322. + rsub r9, r10
  19323. +
  19324. + /* Saturate the shift amount to 31. If the amount
  19325. + is any larger op2 is insignificant. */
  19326. + satu r9 >> 0, 5
  19327. +
  19328. + /* Shift mantissa of op2 to same decimal point as the mantissa
  19329. + of op1. */
  19330. + lsr r11, r11, r9
  19331. +
  19332. + /* Now add the mantissas. */
  19333. + add r8, r11
  19334. +
  19335. +#endif
  19336. + /* Check if we overflowed. */
  19337. + brcs __avr32_f32_add_res_of
  19338. +1:
  19339. + /* Pack result. */
  19340. + or r12, r12, r8 >> 8
  19341. + bfins r12, r10, 23, 8
  19342. +
  19343. + /* Round */
  19344. +#if defined(L_avr32_f32_addsub)
  19345. + mov_imm r10, 0x80000000
  19346. + bld r12, 0
  19347. + subne r10, -1
  19348. + cp.w r11, r10
  19349. + subhs r12, -1
  19350. +#else
  19351. + bld r8, 7
  19352. + acr r12
  19353. +#endif
  19354. +
  19355. + ret r12
  19356. +
  19357. +__avr32_f32_add_op2_subnormal:
  19358. + /* Fix implicit bit and adjust exponent of subnormals. */
  19359. + cbr r11, 31
  19360. + /* Set exponent to 1 if we do not have a zero. */
  19361. + movne r9,1
  19362. +
  19363. + /* Check if op1 is also subnormal. */
  19364. + cp.w r10, 0
  19365. + brne 0b
  19366. + /* Both operands subnormal, just add the mantissas and
  19367. + pack. If the addition of the subnormal numbers results
  19368. + in a normal number then the exponent will automatically
  19369. + be set to 1 by the addition. */
  19370. + cbr r8, 31
  19371. + add r11, r8
  19372. + or r12, r12, r11 >> 8
  19373. + ret r12
  19374. +
  19375. +__avr32_f32_add_op1_nan_or_inf:
  19376. + /* Check if op1 is NaN, if so return NaN */
  19377. + lsl r11, r8, 1
  19378. + retne -1
  19379. +
  19380. + /* op1 is Inf. */
  19381. + bfins r12, r10, 23, 8 /* Generate Inf in r12 */
  19382. +
  19383. + /* Check if op2 is Inf. or NaN */
  19384. + lsr r11, r9, 23
  19385. + cp.w r11, 0xff
  19386. + retne r12 /* op2 not Inf or NaN, return op1 */
  19387. +
  19388. + lsl r9, 9
  19389. + reteq r12 /* op2 Inf return op1 */
  19390. + ret -1 /* op2 is NaN, return NaN */
  19391. +
  19392. +__avr32_f32_add_res_of:
  19393. + /* We overflowed. Increase exponent and shift mantissa.*/
  19394. + lsr r8, 1
  19395. + sub r10, -1
  19396. +
  19397. + /* Clear mantissa to set result to Inf if the exponent is 255. */
  19398. + cp.w r10, 255
  19399. + moveq r8, 0
  19400. + moveq r11, 0
  19401. + rjmp 1b
  19402. +
  19403. +
  19404. +#endif
  19405. +
  19406. +
  19407. +#if defined(L_avr32_f32_div) || defined(L_avr32_f32_div_fast)
  19408. + .align 2
  19409. +
  19410. +#if defined(L_avr32_f32_div_fast)
  19411. + .global __avr32_f32_div_fast
  19412. + .type __avr32_f32_div_fast,@function
  19413. +__avr32_f32_div_fast:
  19414. +#else
  19415. + .global __avr32_f32_div
  19416. + .type __avr32_f32_div,@function
  19417. +__avr32_f32_div:
  19418. +#endif
  19419. +
  19420. + eor r8, r11, r12 /* MSB(r8) = Sign(op1) ^ Sign(op2) */
  19421. +
  19422. + /* Unpack */
  19423. + lsl r12,1
  19424. + lsl r11,1
  19425. + breq 4f /* Check op2 for zero */
  19426. +
  19427. + tst r12, r12
  19428. + moveq r9, 0
  19429. + breq 12f
  19430. +
  19431. + /* Unpack op1*/
  19432. + /* exp: r9 */
  19433. + /* sf: r12 */
  19434. + lsr r9, r12, 24
  19435. + breq 11f /*If number is subnormal*/
  19436. + cp r9, 0xff
  19437. + brhs 2f /* Check op1 for NaN or Inf */
  19438. + lsl r12, 7
  19439. + sbr r12, 31 /*Implicit bit*/
  19440. +12:
  19441. +
  19442. + /* Unpack op2*/
  19443. + /* exp: r10 */
  19444. + /* sf: r11 */
  19445. + lsr r10, r11, 24
  19446. + breq 13f /*If number is subnormal*/
  19447. + cp r10, 0xff
  19448. + brhs 3f /* Check op2 for NaN or Inf */
  19449. + lsl r11,7
  19450. + sbr r11, 31 /*Implicit bit*/
  19451. +
  19452. + cp.w r9, 0
  19453. + subfeq r12, 0
  19454. + reteq 0 /* op1 is zero and op2 is not zero */
  19455. + /* or NaN so return zero */
  19456. +
  19457. +14:
  19458. +
  19459. + /* For UC3, store with predecrement is faster than stm */
  19460. + st.w --sp, r5
  19461. + st.d --sp, r6
  19462. +
  19463. + /* Calculate new exponent */
  19464. + sub r9, r10
  19465. + sub r9,-127
  19466. +
  19467. + /* Divide */
  19468. + /* Approximating 1/d with the following recurrence: */
  19469. + /* R[j+1] = R[j]*(2-R[j]*d) */
  19470. + /* Using 2.30 format */
  19471. + /* TWO: r10 */
  19472. + /* d: r5 */
  19473. + /* Multiply result : r6, r7 */
  19474. + /* Initial guess : r11 */
  19475. + /* New approximations : r11 */
  19476. + /* Dividend : r12 */
  19477. +
  19478. + /* Load TWO */
  19479. + mov_imm r10, 0x80000000
  19480. +
  19481. + lsr r12, 2 /* Get significand of Op1 in 2.30 format */
  19482. + lsr r5, r11, 2 /* Get significand of Op2 (=d) in 2.30 format */
  19483. +
  19484. + /* Load initial guess, using look-up table */
  19485. + /* Initial guess is of format 01.XY, where XY is constructed as follows: */
  19486. + /* Let d be of following format: 00.1xy....., then XY=~xy */
  19487. + /* For d=00.100 = 0,5 -> initial guess=01.11 = 1,75 */
  19488. + /* For d=00.101 = 0,625 -> initial guess=01.11 = 1,5 */
  19489. + /* For d=00.110 = 0,75 -> initial guess=01.11 = 1,25 */
  19490. + /* For d=00.111 = 0,875 -> initial guess=01.11 = 1,0 */
  19491. +
  19492. + lsr r11, r10, 1
  19493. + bfextu r6, r5, 27, 2
  19494. + com r6
  19495. + bfins r11, r6, 28, 2
  19496. +
  19497. + /* First approximation */
  19498. + /* r7 = R[j]*d */
  19499. + mulu.d r6, r11, r5
  19500. + /* r7 = 2-R[j]*d */
  19501. + sub r7, r10, r7<<2
  19502. + /* r11 = R[j]*(2-R[j]*d) */
  19503. + mulu.d r6, r11, r7
  19504. + lsl r11, r7, 2
  19505. +
  19506. + /* Second approximation */
  19507. + /* r7 = R[j]*d */
  19508. + mulu.d r6, r11, r5
  19509. + /* r7 = 2-R[j]*d */
  19510. + sub r7, r10, r7<<2
  19511. + /* r11 = R[j]*(2-R[j]*d) */
  19512. + mulu.d r6, r11, r7
  19513. + lsl r11, r7, 2
  19514. +
  19515. + /* Third approximation */
  19516. + /* r7 = R[j]*d */
  19517. + mulu.d r6, r11, r5
  19518. + /* r7 = 2-R[j]*d */
  19519. + sub r7, r10, r7<<2
  19520. + /* r11 = R[j]*(2-R[j]*d) */
  19521. + mulu.d r6, r11, r7
  19522. + lsl r11, r7, 2
  19523. +
  19524. + /* Fourth approximation */
  19525. + /* r7 = R[j]*d */
  19526. + mulu.d r6, r11, r5
  19527. + /* r7 = 2-R[j]*d */
  19528. + sub r7, r10, r7<<2
  19529. + /* r11 = R[j]*(2-R[j]*d) */
  19530. + mulu.d r6, r11, r7
  19531. + lsl r11, r7, 2
  19532. +
  19533. +
  19534. + /* Multiply with dividend to get quotient, r7 = sf(op1)/sf(op2) */
  19535. + mulu.d r6, r11, r12
  19536. +
  19537. + /* Shift by 3 to get result in 1.31 format, as required by the exponent. */
  19538. + /* Note that 1.31 format is already used by the exponent in r9, since */
  19539. + /* a bias of 127 was added to the result exponent, even though the implicit */
  19540. + /* bit was inserted. This gives the exponent an additional bias of 1, which */
  19541. + /* supports 1.31 format. */
  19542. + //lsl r10, r7, 3
  19543. +
  19544. + /* Adjust exponent and mantissa in case the result is of format
  19545. + 0000.1xxx to 0001.xxx*/
  19546. +#if defined(L_avr32_f32_div)
  19547. + lsr r12, 4 /* Scale dividend to 6.26 format to match the
  19548. + result of the multiplication of the divisor and
  19549. + quotient to get the remainder. */
  19550. +#endif
  19551. + bld r7, 31-3
  19552. + breq 0f
  19553. + lsl r7, 1
  19554. + sub r9, 1
  19555. +#if defined(L_avr32_f32_div)
  19556. + lsl r12, 1 /* Scale dividend to 5.27 format to match the
  19557. + result of the multiplication of the divisor and
  19558. + quotient to get the remainder. */
  19559. +#endif
  19560. +0:
  19561. + cp r9, 0
  19562. + brle __avr32_f32_div_res_subnormal /* Result was subnormal. */
  19563. +
  19564. +
  19565. +#if defined(L_avr32_f32_div)
  19566. + /* In order to round correctly we calculate the remainder:
  19567. + Remainder = dividend[r12] - divisor[r5]*quotient[r7]
  19568. + for the case when the quotient is halfway between the round-up
  19569. + value and the round down value. If the remainder then is negative
  19570. + it means that the quotient was to big and that it should not be
  19571. + rounded up, if the remainder is positive the quotient was to small
  19572. + and we need to round up. If the remainder is zero it means that the
  19573. + quotient is exact but since we need to remove the guard bit we should
  19574. + round to even. */
  19575. + andl r7, 0xffe0
  19576. + orl r7, 0x0010
  19577. +
  19578. + /* Now do the multiplication. The quotient has the format 4.28
  19579. + while the divisor has the format 2.30 which gives a result
  19580. + of 6.26 */
  19581. + mulu.d r10, r5, r7
  19582. +
  19583. + /* Check if remainder is positive, negative or equal. */
  19584. + bfextu r5, r7, 5, 1 /* Get parity bit into bit 0 of r5 */
  19585. + cp r10, 0
  19586. +__avr32_f32_div_round_subnormal:
  19587. + cpc r11, r12
  19588. + srlo r11 /* Remainder positive: we need to round up.*/
  19589. + moveq r11, r5 /* Remainder zero: round up if mantissa odd. */
  19590. +#else
  19591. + bfextu r11, r7, 4, 1 /* Get guard bit */
  19592. +#endif
  19593. +
  19594. + /* Pack final result*/
  19595. + lsr r12, r7, 5
  19596. + bfins r12, r9, 23, 8
  19597. + /* For UC3, load with postincrement is faster than ldm */
  19598. + ld.d r6, sp++
  19599. + ld.w r5, sp++
  19600. + bld r8, 31
  19601. + bst r12, 31
  19602. + /* Rounding add. */
  19603. + add r12, r11
  19604. + ret r12
  19605. +
  19606. +__divsf_return_op1:
  19607. + lsl r8, 1
  19608. + ror r12
  19609. + ret r12
  19610. +
  19611. +
  19612. +2:
  19613. + /* Op1 is NaN or inf */
  19614. + retne -1 /* Return NaN if op1 is NaN */
  19615. + /* Op1 is inf check op2 */
  19616. + mov_imm r9, 0xff000000
  19617. + cp r11, r9
  19618. + brlo __divsf_return_op1 /* inf/number gives inf */
  19619. + ret -1 /* The rest gives NaN*/
  19620. +3:
  19621. + /* Op2 is NaN or inf */
  19622. + reteq 0 /* Return zero if number/inf*/
  19623. + ret -1 /* Return NaN*/
  19624. +4:
  19625. + /* Op1 is zero ? */
  19626. + tst r12,r12
  19627. + reteq -1 /* 0.0/0.0 is NaN */
  19628. + /* Op1 is Nan? */
  19629. + lsr r9, r12, 24
  19630. + breq 11f /*If number is subnormal*/
  19631. + cp r9, 0xff
  19632. + brhs 2b /* Check op1 for NaN or Inf */
  19633. + /* Nonzero/0.0 is Inf. Sign bit will be shifted in before returning*/
  19634. + mov_imm r12, 0xff000000
  19635. + rjmp __divsf_return_op1
  19636. +
  19637. +11: /* Op1 was denormal. Fix it. */
  19638. + lsl r12,7
  19639. + clz r9,r12
  19640. + lsl r12,r12,r9
  19641. + rsub r9,r9,1
  19642. + rjmp 12b
  19643. +
  19644. +13: /* Op2 was denormal. Fix it. */
  19645. + lsl r11,7
  19646. + clz r10,r11
  19647. + lsl r11,r11,r10
  19648. + rsub r10,r10,1
  19649. + rjmp 14b
  19650. +
  19651. +
  19652. +__avr32_f32_div_res_subnormal: /* Divide result was subnormal */
  19653. +#if defined(L_avr32_f32_div)
  19654. + /* Check how much we must scale down the mantissa. */
  19655. + neg r9
  19656. + sub r9, -1 /* We do no longer have an implicit bit. */
  19657. + satu r9 >> 0, 5 /* Saturate shift amount to max 32. */
  19658. + /* Scale down quotient */
  19659. + rsub r10, r9, 32
  19660. + lsr r7, r7, r9
  19661. + /* Scale down the dividend to match the scaling of the quotient. */
  19662. + lsl r6, r12, r10 /* Make the dividend 64-bit and put the lsw in r6 */
  19663. + lsr r12, r12, r9
  19664. +
  19665. + /* Start performing the same rounding as done for normal numbers
  19666. + but this time we have scaled the quotient and dividend and hence
  19667. + need a little different comparison. */
  19668. + andl r7, 0xffe0
  19669. + orl r7, 0x0010
  19670. +
  19671. + /* Now do the multiplication. The quotient has the format 4.28
  19672. + while the divisor has the format 2.30 which gives a result
  19673. + of 6.26 */
  19674. + mulu.d r10, r5, r7
  19675. +
  19676. + /* Set exponent to 0 */
  19677. + mov r9, 0
  19678. +
  19679. + /* Check if remainder is positive, negative or equal. */
  19680. + bfextu r5, r7, 5, 1 /* Get parity bit into bit 0 of r5 */
  19681. + cp r10, r6
  19682. + rjmp __avr32_f32_div_round_subnormal
  19683. +
  19684. +#else
  19685. + ld.d r6, sp++
  19686. + ld.w r5, sp++
  19687. + /*Flush to zero*/
  19688. + ret 0
  19689. +#endif
  19690. +#endif
  19691. +
  19692. +#ifdef L_avr32_f32_mul
  19693. + .global __avr32_f32_mul
  19694. + .type __avr32_f32_mul,@function
  19695. +
  19696. +
  19697. +__avr32_f32_mul:
  19698. + mov r8, r12
  19699. + eor r12, r11 /* MSB(r8) = Sign(op1) ^ Sign(op2) */
  19700. + andh r12, 0x8000, COH
  19701. +
  19702. + /* arrange operands so that that op1 >= op2 */
  19703. + cbr r8, 31
  19704. + breq __avr32_f32_mul_op1_zero
  19705. + cbr r11, 31
  19706. +
  19707. + /* Put the number with the largest exponent in r10
  19708. + and the number with the smallest exponent in r9 */
  19709. + max r10, r8, r11
  19710. + min r9, r8, r11
  19711. +
  19712. + /* Unpack exponent and mantissa of op1 */
  19713. + lsl r8, r10, 8
  19714. + sbr r8, 31 /* Set implicit bit. */
  19715. + lsr r10, 23
  19716. +
  19717. + /* op1 is NaN or Inf. */
  19718. + cp.w r10, 0xff
  19719. + breq __avr32_f32_mul_op1_nan_or_inf
  19720. +
  19721. + /* Unpack exponent and mantissa of op2 */
  19722. + lsl r11, r9, 8
  19723. + sbr r11, 31 /* Set implicit bit. */
  19724. + lsr r9, 23
  19725. +
  19726. + /* op2 is either zero or subnormal. */
  19727. + breq __avr32_f32_mul_op2_subnormal
  19728. +0:
  19729. + /* Calculate new exponent */
  19730. + add r9,r10
  19731. +
  19732. + /* Do the multiplication */
  19733. + mulu.d r10,r8,r11
  19734. +
  19735. + /* We might need to scale up by two if the MSB of the result is
  19736. + zero. */
  19737. + lsl r8, r11, 1
  19738. + movcc r11, r8
  19739. + subcc r9, 1
  19740. +
  19741. + /* Put the shifted out bits of the mantissa into r10 */
  19742. + lsr r10, 8
  19743. + bfins r10, r11, 24, 8
  19744. +
  19745. + sub r9,(127-1) /* remove extra exponent bias */
  19746. + brle __avr32_f32_mul_res_subnormal
  19747. +
  19748. + /* Check for Inf. */
  19749. + cp.w r9, 0xff
  19750. + brge 1f
  19751. +
  19752. + /* Pack result. */
  19753. + or r12, r12, r11 >> 8
  19754. + bfins r12, r9, 23, 8
  19755. +
  19756. + /* Round */
  19757. +__avr32_f32_mul_round:
  19758. + mov_imm r8, 0x80000000
  19759. + bld r12, 0
  19760. + subne r8, -1
  19761. +
  19762. + cp.w r10, r8
  19763. + subhs r12, -1
  19764. +
  19765. + ret r12
  19766. +
  19767. +1:
  19768. + /* Return Inf */
  19769. + orh r12, 0x7f80
  19770. + ret r12
  19771. +
  19772. +__avr32_f32_mul_op2_subnormal:
  19773. + cbr r11, 31
  19774. + clz r9, r11
  19775. + retcs 0 /* op2 is zero. Return 0 */
  19776. + sub r9, 8
  19777. + lsl r11, r11, r9
  19778. + rsub r9, r9, 1
  19779. +
  19780. + /* Check if op2 is subnormal. */
  19781. + tst r10, r10
  19782. + brne 0b
  19783. +
  19784. + /* op2 is subnormal */
  19785. + cbr r8, 31
  19786. + clz r10, r11
  19787. + retcs 0 /* op1 is zero. Return 0 */
  19788. + lsl r8, r8, r10
  19789. + rsub r10, r10, 1
  19790. +
  19791. + rjmp 0b
  19792. +
  19793. +
  19794. +__avr32_f32_mul_op1_nan_or_inf:
  19795. + /* Check if op1 is NaN, if so return NaN */
  19796. + lsl r11, r8, 1
  19797. + retne -1
  19798. +
  19799. + /* op1 is Inf. */
  19800. + tst r9, r9
  19801. + reteq -1 /* Inf * 0 -> NaN */
  19802. +
  19803. + bfins r12, r10, 23, 8 /* Generate Inf in r12 */
  19804. +
  19805. + /* Check if op2 is Inf. or NaN */
  19806. + lsr r11, r9, 23
  19807. + cp.w r11, 0xff
  19808. + retne r12 /* op2 not Inf or NaN, return Inf */
  19809. +
  19810. + lsl r9, 9
  19811. + reteq r12 /* op2 Inf return Inf */
  19812. + ret -1 /* op2 is NaN, return NaN */
  19813. +
  19814. +__avr32_f32_mul_res_subnormal:
  19815. + /* Check if the number is so small that
  19816. + it will be represented with zero. */
  19817. + rsub r9, r9, 9
  19818. + rsub r8, r9, 32
  19819. + retcs 0
  19820. +
  19821. + /* Shift the mantissa into the correct position.*/
  19822. + lsr r9, r11, r9
  19823. + /* Add sign bit. */
  19824. + or r12, r9
  19825. + /* Put the shifted out bits in the most significant part
  19826. + of r8. */
  19827. + lsl r11, r11, r8
  19828. +
  19829. + /* Add all the remainder bits used for rounding into r11 */
  19830. + andh r10, 0x00FF
  19831. + or r10, r11
  19832. + rjmp __avr32_f32_mul_round
  19833. +
  19834. +__avr32_f32_mul_op1_zero:
  19835. + bfextu r10, r11, 23, 8
  19836. + cp.w r10, 0xff
  19837. + retne r12
  19838. + reteq -1
  19839. +
  19840. +#endif
  19841. +
  19842. +
  19843. +#ifdef L_avr32_s32_to_f32
  19844. + .global __avr32_s32_to_f32
  19845. + .type __avr32_s32_to_f32,@function
  19846. +__avr32_s32_to_f32:
  19847. + cp r12, 0
  19848. + reteq r12 /* If zero then return zero float */
  19849. + mov r11, r12 /* Keep the sign */
  19850. + abs r12 /* Compute the absolute value */
  19851. + mov r10, 31 + 127 /* Set the correct exponent */
  19852. +
  19853. + /* Normalize */
  19854. + normalize_sf r10 /*exp*/, r12 /*mant*/, r9 /*scratch*/
  19855. +
  19856. + /* Check for subnormal result */
  19857. + cp.w r10, 0
  19858. + brle __avr32_s32_to_f32_subnormal
  19859. +
  19860. + round_sf r10 /*exp*/, r12 /*mant*/, r9 /*scratch*/
  19861. + pack_sf r12 /*sf*/, r10 /*exp*/, r12 /*mant*/
  19862. + lsl r11, 1
  19863. + ror r12
  19864. + ret r12
  19865. +
  19866. +__avr32_s32_to_f32_subnormal:
  19867. + /* Adjust a subnormal result */
  19868. + adjust_subnormal_sf r12/*sf*/, r10 /*exp*/, r12 /*mant*/, r11/*sign*/, r9 /*scratch*/
  19869. + ret r12
  19870. +
  19871. +#endif
  19872. +
  19873. +#ifdef L_avr32_u32_to_f32
  19874. + .global __avr32_u32_to_f32
  19875. + .type __avr32_u32_to_f32,@function
  19876. +__avr32_u32_to_f32:
  19877. + cp r12, 0
  19878. + reteq r12 /* If zero then return zero float */
  19879. + mov r10, 31 + 127 /* Set the correct exponent */
  19880. +
  19881. + /* Normalize */
  19882. + normalize_sf r10 /*exp*/, r12 /*mant*/, r9 /*scratch*/
  19883. +
  19884. + /* Check for subnormal result */
  19885. + cp.w r10, 0
  19886. + brle __avr32_u32_to_f32_subnormal
  19887. +
  19888. + round_sf r10 /*exp*/, r12 /*mant*/, r9 /*scratch*/
  19889. + pack_sf r12 /*sf*/, r10 /*exp*/, r12 /*mant*/
  19890. + lsr r12,1 /* Sign bit is 0 for unsigned int */
  19891. + ret r12
  19892. +
  19893. +__avr32_u32_to_f32_subnormal:
  19894. + /* Adjust a subnormal result */
  19895. + mov r8, 0
  19896. + adjust_subnormal_sf r12/*sf*/,r10 /*exp*/, r12 /*mant*/,r8/*sign*/, r9 /*scratch*/
  19897. + ret r12
  19898. +
  19899. +
  19900. +#endif
  19901. +
  19902. +
  19903. +#ifdef L_avr32_f32_to_s32
  19904. + .global __avr32_f32_to_s32
  19905. + .type __avr32_f32_to_s32,@function
  19906. +__avr32_f32_to_s32:
  19907. + bfextu r11, r12, 23, 8
  19908. + sub r11,127 /* Fix bias */
  19909. + retlo 0 /* Negative exponent yields zero integer */
  19910. +
  19911. + /* Shift mantissa into correct position */
  19912. + rsub r11,r11,31 /* Shift amount */
  19913. + lsl r10,r12,8 /* Get mantissa */
  19914. + sbr r10,31 /* Add implicit bit */
  19915. + lsr r10,r10,r11 /* Perform shift */
  19916. + lsl r12,1 /* Check sign */
  19917. + retcc r10 /* if positive, we are done */
  19918. + neg r10 /* if negative float, negate result */
  19919. + ret r10
  19920. +
  19921. +#endif
  19922. +
  19923. +#ifdef L_avr32_f32_to_u32
  19924. + .global __avr32_f32_to_u32
  19925. + .type __avr32_f32_to_u32,@function
  19926. +__avr32_f32_to_u32:
  19927. + cp r12,0
  19928. + retmi 0 /* Negative numbers gives 0 */
  19929. + bfextu r11, r12, 23, 8 /* Extract exponent */
  19930. + sub r11,127 /* Fix bias */
  19931. + retlo 0 /* Negative exponent yields zero integer */
  19932. +
  19933. + /* Shift mantissa into correct position */
  19934. + rsub r11,r11,31 /* Shift amount */
  19935. + lsl r12,8 /* Get mantissa */
  19936. + sbr r12,31 /* Add implicit bit */
  19937. + lsr r12,r12,r11 /* Perform shift */
  19938. + ret r12
  19939. +
  19940. +#endif
  19941. +
  19942. +#ifdef L_avr32_f32_to_f64
  19943. + .global __avr32_f32_to_f64
  19944. + .type __avr32_f32_to_f64,@function
  19945. +
  19946. +__avr32_f32_to_f64:
  19947. + lsl r11,r12,1 /* Remove sign bit, keep original value in r12*/
  19948. + moveq r10, 0
  19949. + reteq r11 /* Return zero if input is zero */
  19950. +
  19951. + bfextu r9,r11,24,8 /* Get exponent */
  19952. + cp.w r9,0xff /* check for NaN or inf */
  19953. + breq 0f
  19954. +
  19955. + lsl r11,7 /* Convert sf mantissa to df format */
  19956. + mov r10,0
  19957. +
  19958. + /* Check if implicit bit should be set */
  19959. + cp.w r9, 0
  19960. + subeq r9,-1 /* Adjust exponent if it was 0 */
  19961. + srne r8
  19962. + or r11, r11, r8 << 31 /* Set implicit bit if needed */
  19963. + sub r9,(127-0x3ff) /* Convert exponent to df format exponent */
  19964. +
  19965. + /*We know that low register of mantissa is 0, and will be unaffected by normalization.*/
  19966. + /*We can therefore use the faster normalize_sf function instead of normalize_df.*/
  19967. + normalize_sf r9 /*exp*/, r11 /*mantissa*/, r8 /*scratch*/
  19968. + pack_df r9 /*exp*/, r10, r11 /*mantissa*/, r10, r11 /*df*/
  19969. +
  19970. +__extendsfdf_return_op1:
  19971. + /* Rotate in sign bit */
  19972. + lsl r12, 1
  19973. + ror r11
  19974. + ret r11
  19975. +
  19976. +0:
  19977. + /* Inf or NaN*/
  19978. + mov_imm r10, 0xffe00000
  19979. + lsl r11,8 /* check mantissa */
  19980. + movne r11, -1 /* Return NaN */
  19981. + moveq r11, r10 /* Return inf */
  19982. + mov r10, 0
  19983. + rjmp __extendsfdf_return_op1
  19984. +#endif
  19985. +
  19986. +
  19987. +#ifdef L_avr32_f64_to_f32
  19988. + .global __avr32_f64_to_f32
  19989. + .type __avr32_f64_to_f32,@function
  19990. +
  19991. +__avr32_f64_to_f32:
  19992. + /* Unpack */
  19993. + lsl r9,r11,1 /* Unpack exponent */
  19994. + lsr r9,21
  19995. +
  19996. + reteq 0 /* If exponent is 0 the number is so small
  19997. + that the conversion to single float gives
  19998. + zero */
  19999. +
  20000. + lsl r8,r11,10 /* Adjust mantissa */
  20001. + or r12,r8,r10>>22
  20002. +
  20003. + lsl r10,10 /* Check if there are any remaining bits
  20004. + in the low part of the mantissa.*/
  20005. + neg r10
  20006. + rol r12 /* If there were remaining bits then set lsb
  20007. + of mantissa to 1 */
  20008. +
  20009. + cp r9,0x7ff
  20010. + breq 2f /* Check for NaN or inf */
  20011. +
  20012. + sub r9,(0x3ff-127) /* Adjust bias of exponent */
  20013. + sbr r12,31 /* set the implicit bit.*/
  20014. +
  20015. + cp.w r9, 0 /* Check for subnormal number */
  20016. + brle 3f
  20017. +
  20018. + round_sf r9 /*exp*/, r12 /*mant*/, r10 /*scratch*/
  20019. + pack_sf r12 /*sf*/, r9 /*exp*/, r12 /*mant*/
  20020. +__truncdfsf_return_op1:
  20021. + /* Rotate in sign bit */
  20022. + lsl r11, 1
  20023. + ror r12
  20024. + ret r12
  20025. +
  20026. +2:
  20027. + /* NaN or inf */
  20028. + cbr r12,31 /* clear implicit bit */
  20029. + retne -1 /* Return NaN if mantissa not zero */
  20030. + mov_imm r12, 0x7f800000
  20031. + ret r12 /* Return inf */
  20032. +
  20033. +3: /* Result is subnormal. Adjust it.*/
  20034. + adjust_subnormal_sf r12/*sf*/,r9 /*exp*/, r12 /*mant*/, r11/*sign*/, r10 /*scratch*/
  20035. + ret r12
  20036. +
  20037. +
  20038. +#endif
  20039. +
  20040. +#if defined(L_mulsi3) && defined(__AVR32_NO_MUL__)
  20041. + .global __mulsi3
  20042. + .type __mulsi3,@function
  20043. +
  20044. +__mulsi3:
  20045. + mov r9, 0
  20046. +0:
  20047. + lsr r11, 1
  20048. + addcs r9, r9, r12
  20049. + breq 1f
  20050. + lsl r12, 1
  20051. + rjmp 0b
  20052. +1:
  20053. + ret r9
  20054. +#endif
  20055. diff -Nur gcc-4.4.6.orig/gcc/config/avr32/lib2funcs.S gcc-4.4.6/gcc/config/avr32/lib2funcs.S
  20056. --- gcc-4.4.6.orig/gcc/config/avr32/lib2funcs.S 1970-01-01 01:00:00.000000000 +0100
  20057. +++ gcc-4.4.6/gcc/config/avr32/lib2funcs.S 2011-10-22 19:23:08.524581303 +0200
  20058. @@ -0,0 +1,21 @@
  20059. + .align 4
  20060. + .global __nonlocal_goto
  20061. + .type __nonlocal_goto,@function
  20062. +
  20063. +/* __nonlocal_goto: This function handles nonlocal_goto's in gcc.
  20064. +
  20065. + parameter 0 (r12) = New Frame Pointer
  20066. + parameter 1 (r11) = Address to goto
  20067. + parameter 2 (r10) = New Stack Pointer
  20068. +
  20069. + This function invalidates the return stack, since it returns from a
  20070. + function without using a return instruction.
  20071. +*/
  20072. +__nonlocal_goto:
  20073. + mov r7, r12
  20074. + mov sp, r10
  20075. + frs # Flush return stack
  20076. + mov pc, r11
  20077. +
  20078. +
  20079. +
  20080. diff -Nur gcc-4.4.6.orig/gcc/config/avr32/linux-elf.h gcc-4.4.6/gcc/config/avr32/linux-elf.h
  20081. --- gcc-4.4.6.orig/gcc/config/avr32/linux-elf.h 1970-01-01 01:00:00.000000000 +0100
  20082. +++ gcc-4.4.6/gcc/config/avr32/linux-elf.h 2011-10-22 19:23:08.524581303 +0200
  20083. @@ -0,0 +1,151 @@
  20084. +/*
  20085. + Linux/Elf specific definitions.
  20086. + Copyright 2003-2006 Atmel Corporation.
  20087. +
  20088. + Written by Ronny Pedersen, Atmel Norway, <rpedersen@atmel.com>
  20089. + and Håvard Skinnemoen, Atmel Norway, <hskinnemoen@atmel.com>
  20090. +
  20091. + This file is part of GCC.
  20092. +
  20093. + This program is free software; you can redistribute it and/or modify
  20094. + it under the terms of the GNU General Public License as published by
  20095. + the Free Software Foundation; either version 2 of the License, or
  20096. + (at your option) any later version.
  20097. +
  20098. + This program is distributed in the hope that it will be useful,
  20099. + but WITHOUT ANY WARRANTY; without even the implied warranty of
  20100. + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  20101. + GNU General Public License for more details.
  20102. +
  20103. + You should have received a copy of the GNU General Public License
  20104. + along with this program; if not, write to the Free Software
  20105. + Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. */
  20106. +
  20107. +
  20108. +
  20109. +/* elfos.h should have already been included. Now just override
  20110. + any conflicting definitions and add any extras. */
  20111. +
  20112. +/* Run-time Target Specification. */
  20113. +#undef TARGET_VERSION
  20114. +#define TARGET_VERSION fputs (" (AVR32 GNU/Linux with ELF)", stderr);
  20115. +
  20116. +/* Do not assume anything about header files. */
  20117. +#define NO_IMPLICIT_EXTERN_C
  20118. +
  20119. +/* The GNU C++ standard library requires that these macros be defined. */
  20120. +#undef CPLUSPLUS_CPP_SPEC
  20121. +#define CPLUSPLUS_CPP_SPEC "-D_GNU_SOURCE %(cpp)"
  20122. +
  20123. +/* Now we define the strings used to build the spec file. */
  20124. +#undef LIB_SPEC
  20125. +#define LIB_SPEC \
  20126. + "%{pthread:-lpthread} \
  20127. + %{shared:-lc} \
  20128. + %{!shared:%{profile:-lc_p}%{!profile:-lc}}"
  20129. +
  20130. +/* Provide a STARTFILE_SPEC appropriate for GNU/Linux. Here we add
  20131. + the GNU/Linux magical crtbegin.o file (see crtstuff.c) which
  20132. + provides part of the support for getting C++ file-scope static
  20133. + object constructed before entering `main'. */
  20134. +
  20135. +#undef STARTFILE_SPEC
  20136. +#define STARTFILE_SPEC \
  20137. + "%{!shared: \
  20138. + %{pg:gcrt1.o%s} %{!pg:%{p:gcrt1.o%s} \
  20139. + %{!p:%{profile:gcrt1.o%s} \
  20140. + %{!profile:crt1.o%s}}}} \
  20141. + crti.o%s %{!shared:crtbegin.o%s} %{shared:crtbeginS.o%s}"
  20142. +
  20143. +/* Provide a ENDFILE_SPEC appropriate for GNU/Linux. Here we tack on
  20144. + the GNU/Linux magical crtend.o file (see crtstuff.c) which
  20145. + provides part of the support for getting C++ file-scope static
  20146. + object constructed before entering `main', followed by a normal
  20147. + GNU/Linux "finalizer" file, `crtn.o'. */
  20148. +
  20149. +#undef ENDFILE_SPEC
  20150. +#define ENDFILE_SPEC \
  20151. + "%{!shared:crtend.o%s} %{shared:crtendS.o%s} crtn.o%s"
  20152. +
  20153. +#undef ASM_SPEC
  20154. +#define ASM_SPEC "%{!mno-pic:%{!fno-pic:--pic}} %{mrelax|O*:%{mno-relax|O0|O1: ;:--linkrelax}} %{mcpu=*:-mcpu=%*}"
  20155. +
  20156. +#undef LINK_SPEC
  20157. +#define LINK_SPEC "%{version:-v} \
  20158. + %{static:-Bstatic} \
  20159. + %{shared:-shared} \
  20160. + %{symbolic:-Bsymbolic} \
  20161. + %{rdynamic:-export-dynamic} \
  20162. + %{!dynamic-linker:-dynamic-linker /lib/ld-uClibc.so.0} \
  20163. + %{mrelax|O*:%{mno-relax|O0|O1: ;:--relax}}"
  20164. +
  20165. +#define TARGET_OS_CPP_BUILTINS() LINUX_TARGET_OS_CPP_BUILTINS()
  20166. +
  20167. +/* This is how we tell the assembler that two symbols have the same value. */
  20168. +#define ASM_OUTPUT_DEF(FILE, NAME1, NAME2) \
  20169. + do \
  20170. + { \
  20171. + assemble_name (FILE, NAME1); \
  20172. + fputs (" = ", FILE); \
  20173. + assemble_name (FILE, NAME2); \
  20174. + fputc ('\n', FILE); \
  20175. + } \
  20176. + while (0)
  20177. +
  20178. +
  20179. +
  20180. +#undef CC1_SPEC
  20181. +#define CC1_SPEC "%{profile:-p}"
  20182. +
  20183. +/* Target CPU builtins. */
  20184. +#define TARGET_CPU_CPP_BUILTINS() \
  20185. + do \
  20186. + { \
  20187. + builtin_define ("__avr32__"); \
  20188. + builtin_define ("__AVR32__"); \
  20189. + builtin_define ("__AVR32_LINUX__"); \
  20190. + builtin_define (avr32_part->macro); \
  20191. + builtin_define (avr32_arch->macro); \
  20192. + if (avr32_arch->uarch_type == UARCH_TYPE_AVR32A) \
  20193. + builtin_define ("__AVR32_AVR32A__"); \
  20194. + else \
  20195. + builtin_define ("__AVR32_AVR32B__"); \
  20196. + if (TARGET_UNALIGNED_WORD) \
  20197. + builtin_define ("__AVR32_HAS_UNALIGNED_WORD__"); \
  20198. + if (TARGET_SIMD) \
  20199. + builtin_define ("__AVR32_HAS_SIMD__"); \
  20200. + if (TARGET_DSP) \
  20201. + builtin_define ("__AVR32_HAS_DSP__"); \
  20202. + if (TARGET_RMW) \
  20203. + builtin_define ("__AVR32_HAS_RMW__"); \
  20204. + if (TARGET_BRANCH_PRED) \
  20205. + builtin_define ("__AVR32_HAS_BRANCH_PRED__"); \
  20206. + if (TARGET_FAST_FLOAT) \
  20207. + builtin_define ("__AVR32_FAST_FLOAT__"); \
  20208. + } \
  20209. + while (0)
  20210. +
  20211. +
  20212. +
  20213. +/* Call the function profiler with a given profile label. */
  20214. +#undef FUNCTION_PROFILER
  20215. +#define FUNCTION_PROFILER(STREAM, LABELNO) \
  20216. + do \
  20217. + { \
  20218. + fprintf (STREAM, "\tmov\tlr, lo(mcount)\n\torh\tlr, hi(mcount)\n"); \
  20219. + fprintf (STREAM, "\ticall lr\n"); \
  20220. + } \
  20221. + while (0)
  20222. +
  20223. +#define NO_PROFILE_COUNTERS 1
  20224. +
  20225. +/* For dynamic libraries to work */
  20226. +/* #define PLT_REG_CALL_CLOBBERED 1 */
  20227. +#define AVR32_ALWAYS_PIC 1
  20228. +
  20229. +/* uclibc does not implement sinf, cosf etc. */
  20230. +#undef TARGET_C99_FUNCTIONS
  20231. +#define TARGET_C99_FUNCTIONS 0
  20232. +
  20233. +#define LINK_GCC_C_SEQUENCE_SPEC \
  20234. + "%{static:--start-group} %G %L %{static:--end-group}%{!static:%G}"
  20235. diff -Nur gcc-4.4.6.orig/gcc/config/avr32/predicates.md gcc-4.4.6/gcc/config/avr32/predicates.md
  20236. --- gcc-4.4.6.orig/gcc/config/avr32/predicates.md 1970-01-01 01:00:00.000000000 +0100
  20237. +++ gcc-4.4.6/gcc/config/avr32/predicates.md 2011-10-22 19:23:08.524581303 +0200
  20238. @@ -0,0 +1,422 @@
  20239. +;; AVR32 predicates file.
  20240. +;; Copyright 2003-2006 Atmel Corporation.
  20241. +;;
  20242. +;; Written by Ronny Pedersen, Atmel Norway, <rpedersen@atmel.com>
  20243. +;;
  20244. +;; This file is part of GCC.
  20245. +;;
  20246. +;; This program is free software; you can redistribute it and/or modify
  20247. +;; it under the terms of the GNU General Public License as published by
  20248. +;; the Free Software Foundation; either version 2 of the License, or
  20249. +;; (at your option) any later version.
  20250. +;;
  20251. +;; This program is distributed in the hope that it will be useful,
  20252. +;; but WITHOUT ANY WARRANTY; without even the implied warranty of
  20253. +;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  20254. +;; GNU General Public License for more details.
  20255. +;;
  20256. +;; You should have received a copy of the GNU General Public License
  20257. +;; along with this program; if not, write to the Free Software
  20258. +;; Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
  20259. +
  20260. +
  20261. +;; True if the operand is a memory reference which contains an
  20262. +;; Address consisting of a single pointer register
  20263. +(define_predicate "avr32_indirect_register_operand"
  20264. + (and (match_code "mem")
  20265. + (match_test "register_operand(XEXP(op, 0), SImode)")))
  20266. +
  20267. +
  20268. +
  20269. +;; Address expression with a base pointer offset with
  20270. +;; a register displacement
  20271. +(define_predicate "avr32_indexed_memory_operand"
  20272. + (and (match_code "mem")
  20273. + (match_test "GET_CODE(XEXP(op, 0)) == PLUS"))
  20274. + {
  20275. +
  20276. + rtx op0 = XEXP(XEXP(op, 0), 0);
  20277. + rtx op1 = XEXP(XEXP(op, 0), 1);
  20278. +
  20279. + return ((avr32_address_register_rtx_p (op0, 0)
  20280. + && avr32_legitimate_index_p (GET_MODE(op), op1, 0))
  20281. + || (avr32_address_register_rtx_p (op1, 0)
  20282. + && avr32_legitimate_index_p (GET_MODE(op), op0, 0)));
  20283. +
  20284. + })
  20285. +
  20286. +;; Operand suitable for the ld.sb instruction
  20287. +(define_predicate "load_sb_memory_operand"
  20288. + (ior (match_operand 0 "avr32_indirect_register_operand")
  20289. + (match_operand 0 "avr32_indexed_memory_operand")))
  20290. +
  20291. +
  20292. +;; Operand suitable as operand to insns sign extending QI values
  20293. +(define_predicate "extendqi_operand"
  20294. + (ior (match_operand 0 "load_sb_memory_operand")
  20295. + (match_operand 0 "register_operand")))
  20296. +
  20297. +(define_predicate "post_inc_memory_operand"
  20298. + (and (match_code "mem")
  20299. + (match_test "(GET_CODE(XEXP(op, 0)) == POST_INC)
  20300. + && REG_P(XEXP(XEXP(op, 0), 0))")))
  20301. +
  20302. +(define_predicate "pre_dec_memory_operand"
  20303. + (and (match_code "mem")
  20304. + (match_test "(GET_CODE(XEXP(op, 0)) == PRE_DEC)
  20305. + && REG_P(XEXP(XEXP(op, 0), 0))")))
  20306. +
  20307. +;; Operand suitable for add instructions
  20308. +(define_predicate "avr32_add_operand"
  20309. + (ior (match_operand 0 "register_operand")
  20310. + (and (match_operand 0 "immediate_operand")
  20311. + (match_test "CONST_OK_FOR_CONSTRAINT_P(INTVAL(op), 'I', \"Is21\")"))))
  20312. +
  20313. +;; Operand is a power of two immediate
  20314. +(define_predicate "power_of_two_operand"
  20315. + (match_code "const_int")
  20316. +{
  20317. + HOST_WIDE_INT value = INTVAL (op);
  20318. +
  20319. + return value != 0 && (value & (value - 1)) == 0;
  20320. +})
  20321. +
  20322. +;; Operand is a multiple of 8 immediate
  20323. +(define_predicate "multiple_of_8_operand"
  20324. + (match_code "const_int")
  20325. +{
  20326. + HOST_WIDE_INT value = INTVAL (op);
  20327. +
  20328. + return (value & 0x7) == 0 ;
  20329. +})
  20330. +
  20331. +;; Operand is a multiple of 16 immediate
  20332. +(define_predicate "multiple_of_16_operand"
  20333. + (match_code "const_int")
  20334. +{
  20335. + HOST_WIDE_INT value = INTVAL (op);
  20336. +
  20337. + return (value & 0xf) == 0 ;
  20338. +})
  20339. +
  20340. +;; Operand is a mask used for masking away upper bits of a reg
  20341. +(define_predicate "avr32_mask_upper_bits_operand"
  20342. + (match_code "const_int")
  20343. +{
  20344. + HOST_WIDE_INT value = INTVAL (op) + 1;
  20345. +
  20346. + return value != 1 && value != 0 && (value & (value - 1)) == 0;
  20347. +})
  20348. +
  20349. +
  20350. +;; Operand suitable for mul instructions
  20351. +(define_predicate "avr32_mul_operand"
  20352. + (ior (match_operand 0 "register_operand")
  20353. + (and (match_operand 0 "immediate_operand")
  20354. + (match_test "CONST_OK_FOR_CONSTRAINT_P(INTVAL(op), 'K', \"Ks08\")"))))
  20355. +
  20356. +;; True for logical binary operators.
  20357. +(define_predicate "logical_binary_operator"
  20358. + (match_code "ior,xor,and"))
  20359. +
  20360. +;; True for logical shift operators
  20361. +(define_predicate "logical_shift_operator"
  20362. + (match_code "ashift,lshiftrt"))
  20363. +
  20364. +;; True for shift operand for logical and, or and eor insns
  20365. +(define_predicate "avr32_logical_shift_operand"
  20366. + (and (match_code "ashift,lshiftrt")
  20367. + (ior (and (match_test "GET_CODE(XEXP(op, 1)) == CONST_INT")
  20368. + (match_test "register_operand(XEXP(op, 0), GET_MODE(XEXP(op, 0)))"))
  20369. + (and (match_test "GET_CODE(XEXP(op, 0)) == CONST_INT")
  20370. + (match_test "register_operand(XEXP(op, 1), GET_MODE(XEXP(op, 1)))"))))
  20371. + )
  20372. +
  20373. +
  20374. +;; Predicate for second operand to and, ior and xor insn patterns
  20375. +(define_predicate "avr32_logical_insn_operand"
  20376. + (ior (match_operand 0 "register_operand")
  20377. + (match_operand 0 "avr32_logical_shift_operand"))
  20378. +)
  20379. +
  20380. +
  20381. +;; True for avr32 comparison operators
  20382. +(define_predicate "avr32_comparison_operator"
  20383. + (ior (match_code "eq, ne, gt, ge, lt, le, gtu, geu, ltu, leu")
  20384. + (and (match_code "unspec")
  20385. + (match_test "(XINT(op, 1) == UNSPEC_COND_MI)
  20386. + || (XINT(op, 1) == UNSPEC_COND_PL)"))))
  20387. +
  20388. +(define_predicate "avr32_cond3_comparison_operator"
  20389. + (ior (match_code "eq, ne, ge, lt, geu, ltu")
  20390. + (and (match_code "unspec")
  20391. + (match_test "(XINT(op, 1) == UNSPEC_COND_MI)
  20392. + || (XINT(op, 1) == UNSPEC_COND_PL)"))))
  20393. +
  20394. +;; True for avr32 comparison operand
  20395. +(define_predicate "avr32_comparison_operand"
  20396. + (ior (and (match_code "eq, ne, gt, ge, lt, le, gtu, geu, ltu, leu")
  20397. + (match_test "(CC0_P (XEXP(op,0)) && rtx_equal_p (XEXP(op,1), const0_rtx))"))
  20398. + (and (match_code "unspec")
  20399. + (match_test "(XINT(op, 1) == UNSPEC_COND_MI)
  20400. + || (XINT(op, 1) == UNSPEC_COND_PL)"))))
  20401. +
  20402. +;; True if this is a const_int with one bit set
  20403. +(define_predicate "one_bit_set_operand"
  20404. + (match_code "const_int")
  20405. + {
  20406. + int i;
  20407. + int value;
  20408. + int ones = 0;
  20409. +
  20410. + value = INTVAL(op);
  20411. + for ( i = 0 ; i < 32; i++ ){
  20412. + if ( value & ( 1 << i ) ){
  20413. + ones++;
  20414. + }
  20415. + }
  20416. +
  20417. + return ( ones == 1 );
  20418. + })
  20419. +
  20420. +
  20421. +;; True if this is a const_int with one bit cleared
  20422. +(define_predicate "one_bit_cleared_operand"
  20423. + (match_code "const_int")
  20424. + {
  20425. + int i;
  20426. + int value;
  20427. + int zeroes = 0;
  20428. +
  20429. + value = INTVAL(op);
  20430. + for ( i = 0 ; i < 32; i++ ){
  20431. + if ( !(value & ( 1 << i )) ){
  20432. + zeroes++;
  20433. + }
  20434. + }
  20435. +
  20436. + return ( zeroes == 1 );
  20437. + })
  20438. +
  20439. +
  20440. +;; Immediate all the low 16-bits cleared
  20441. +(define_predicate "avr32_hi16_immediate_operand"
  20442. + (match_code "const_int")
  20443. + {
  20444. + /* If the low 16-bits are zero then this
  20445. + is a hi16 immediate. */
  20446. + return ((INTVAL(op) & 0xffff) == 0);
  20447. + }
  20448. +)
  20449. +
  20450. +;; True if this is a register or immediate operand
  20451. +(define_predicate "register_immediate_operand"
  20452. + (ior (match_operand 0 "register_operand")
  20453. + (match_operand 0 "immediate_operand")))
  20454. +
  20455. +;; True if this is a register or const_int operand
  20456. +(define_predicate "register_const_int_operand"
  20457. + (ior (match_operand 0 "register_operand")
  20458. + (and (match_operand 0 "const_int_operand")
  20459. + (match_operand 0 "immediate_operand"))))
  20460. +
  20461. +;; True if this is a register or const_double operand
  20462. +(define_predicate "register_const_double_operand"
  20463. + (ior (match_operand 0 "register_operand")
  20464. + (match_operand 0 "const_double_operand")))
  20465. +
  20466. +;; True if this is an operand containing a label_ref.
  20467. +(define_predicate "avr32_label_ref_operand"
  20468. + (and (match_code "mem")
  20469. + (match_test "avr32_find_symbol(op)
  20470. + && (GET_CODE(avr32_find_symbol(op)) == LABEL_REF)")))
  20471. +
  20472. +;; True if this is a valid symbol pointing to the constant pool.
  20473. +(define_predicate "avr32_const_pool_operand"
  20474. + (and (match_code "symbol_ref")
  20475. + (match_test "CONSTANT_POOL_ADDRESS_P(op)"))
  20476. + {
  20477. + return (flag_pic ? (!(symbol_mentioned_p (get_pool_constant (op))
  20478. + || label_mentioned_p (get_pool_constant (op)))
  20479. + || avr32_got_mentioned_p(get_pool_constant (op)))
  20480. + : true);
  20481. + }
  20482. +)
  20483. +
  20484. +;; True if this is a memory reference to the constant or mini pool.
  20485. +(define_predicate "avr32_const_pool_ref_operand"
  20486. + (ior (match_operand 0 "avr32_label_ref_operand")
  20487. + (and (match_code "mem")
  20488. + (match_test "avr32_const_pool_operand(XEXP(op,0), GET_MODE(XEXP(op,0)))"))))
  20489. +
  20490. +
  20491. +;; Legal source operand for movti insns
  20492. +(define_predicate "avr32_movti_src_operand"
  20493. + (ior (match_operand 0 "avr32_const_pool_ref_operand")
  20494. + (ior (ior (match_operand 0 "register_immediate_operand")
  20495. + (match_operand 0 "avr32_indirect_register_operand"))
  20496. + (match_operand 0 "post_inc_memory_operand"))))
  20497. +
  20498. +;; Legal destination operand for movti insns
  20499. +(define_predicate "avr32_movti_dst_operand"
  20500. + (ior (ior (match_operand 0 "register_operand")
  20501. + (match_operand 0 "avr32_indirect_register_operand"))
  20502. + (match_operand 0 "pre_dec_memory_operand")))
  20503. +
  20504. +
  20505. +;; True if this is a k12 offseted memory operand: (reg), or (plus reg const_int) whose displacement fits Ks14/Ks13/Ks12 by mode.
  20506. +(define_predicate "avr32_k12_memory_operand"
  20507. + (and (match_code "mem")
  20508. + (ior (match_test "REG_P(XEXP(op, 0))")
  20509. + (match_test "GET_CODE(XEXP(op, 0)) == PLUS
  20510. + && REG_P(XEXP(XEXP(op, 0), 0))
  20511. + && (GET_CODE(XEXP(XEXP(op, 0), 1)) == CONST_INT)
  20512. + && (CONST_OK_FOR_CONSTRAINT_P(INTVAL(XEXP(XEXP(op, 0), 1)),
  20513. + 'K', (mode == SImode) ? \"Ks14\" : ((mode == HImode) ? \"Ks13\" : \"Ks12\")))"))))
  20514. +
  20515. +;; True if this is a memory operand with an immediate displacement.
  20516. +(define_predicate "avr32_imm_disp_memory_operand"
  20517. + (and (match_code "mem")
  20518. + (match_test "GET_CODE(XEXP(op, 0)) == PLUS
  20519. + && REG_P(XEXP(XEXP(op, 0), 0))
  20520. + && (GET_CODE(XEXP(XEXP(op, 0), 1)) == CONST_INT)")))
  20521. +
  20522. +;; True if this is a bswap operand.
  20523. +(define_predicate "avr32_bswap_operand"
  20524. + (ior (match_operand 0 "avr32_k12_memory_operand")
  20525. + (match_operand 0 "register_operand")))
  20526. +
  20527. +;; True if this is a valid coprocessor insn memory operand; rejects (plus reg const_int) whose displacement is outside Ku10.
  20528. +(define_predicate "avr32_cop_memory_operand"
  20529. + (and (match_operand 0 "memory_operand")
  20530. + (not (match_test "GET_CODE(XEXP(op, 0)) == PLUS
  20531. + && REG_P(XEXP(XEXP(op, 0), 0))
  20532. + && (GET_CODE(XEXP(XEXP(op, 0), 1)) == CONST_INT)
  20533. + && !(CONST_OK_FOR_CONSTRAINT_P(INTVAL(XEXP(XEXP(op, 0), 1)), 'K', \"Ku10\"))"))))
  20534. +
  20535. +;; True if this is a valid source/destination operand.
  20536. +;; for moving values to/from a coprocessor
  20537. +(define_predicate "avr32_cop_move_operand"
  20538. + (ior (match_operand 0 "register_operand")
  20539. + (match_operand 0 "avr32_cop_memory_operand")))
  20540. +
  20541. +
  20542. +;; True if this is a valid extract byte offset for use in
  20543. +;; load extracted index insns.
  20544. +(define_predicate "avr32_extract_shift_operand"
  20545. + (and (match_operand 0 "const_int_operand")
  20546. + (match_test "(INTVAL(op) == 0) || (INTVAL(op) == 8)
  20547. + || (INTVAL(op) == 16) || (INTVAL(op) == 24)")))
  20548. +
  20549. +;; True if this is a valid avr32 symbol operand.
  20550. +(define_predicate "avr32_symbol_operand"
  20551. + (and (match_code "label_ref, symbol_ref, const")
  20552. + (match_test "avr32_find_symbol(op)")))
  20553. +
  20554. +;; True for a label_ref/symbol_ref usable by the lda.w and call pseudo insns; only when TARGET_HAS_ASM_ADDR_PSEUDOS or flag_pic.
  20555. +(define_predicate "avr32_address_operand"
  20556. + (and (and (match_code "label_ref, symbol_ref")
  20557. + (match_test "avr32_find_symbol(op)"))
  20558. + (ior (match_test "TARGET_HAS_ASM_ADDR_PSEUDOS")
  20559. + (match_test "flag_pic")) ))
  20560. +
  20561. +;; An immediate k16 address operand. NOTE(review): only the presence of a CONST_INT in the PLUS is checked, not the Ks16 range — presumably the "RKs16" constraint enforces it; verify.
  20562. +(define_predicate "avr32_ks16_address_operand"
  20563. + (and (match_operand 0 "address_operand")
  20564. + (ior (match_test "REG_P(op)")
  20565. + (match_test "GET_CODE(op) == PLUS
  20566. + && ((GET_CODE(XEXP(op,0)) == CONST_INT)
  20567. + || (GET_CODE(XEXP(op,1)) == CONST_INT))")) ))
  20568. +
  20569. +;; An offset k16 memory operand
  20570. +(define_predicate "avr32_ks16_memory_operand"
  20571. + (and (match_code "mem")
  20572. + (match_test "avr32_ks16_address_operand (XEXP (op, 0), GET_MODE (XEXP (op, 0)))")))
  20573. +
  20574. +;; An immediate k11 address operand
  20575. +(define_predicate "avr32_ks11_address_operand"
  20576. + (and (match_operand 0 "address_operand")
  20577. + (ior (match_test "REG_P(op)")
  20578. + (match_test "GET_CODE(op) == PLUS
  20579. + && (((GET_CODE(XEXP(op,0)) == CONST_INT)
  20580. + && avr32_const_ok_for_constraint_p(INTVAL(XEXP(op,0)), 'K', \"Ks11\"))
  20581. + || ((GET_CODE(XEXP(op,1)) == CONST_INT)
  20582. + && avr32_const_ok_for_constraint_p(INTVAL(XEXP(op,1)), 'K', \"Ks11\")))")) ))
  20583. +
  20584. +;; True if this is a avr32 call operand
  20585. +(define_predicate "avr32_call_operand"
  20586. + (ior (ior (match_operand 0 "register_operand")
  20587. + (ior (match_operand 0 "avr32_const_pool_ref_operand")
  20588. + (match_operand 0 "avr32_address_operand")))
  20589. + (match_test "SYMBOL_REF_RCALL_FUNCTION_P(op)")))
  20590. +
  20591. +;; Return true for operators performing ALU operations
  20592. +
  20593. +(define_predicate "alu_operator"
  20594. + (match_code "ior, xor, and, plus, minus, ashift, lshiftrt, ashiftrt"))
  20595. +
  20596. +(define_predicate "avr32_add_shift_immediate_operand"
  20597. + (and (match_operand 0 "immediate_operand")
  20598. + (match_test "CONST_OK_FOR_CONSTRAINT_P(INTVAL(op), 'K', \"Ku02\")")))
  20599. +
  20600. +(define_predicate "avr32_cond_register_immediate_operand"
  20601. + (ior (match_operand 0 "register_operand")
  20602. + (and (match_operand 0 "immediate_operand")
  20603. + (match_test "CONST_OK_FOR_CONSTRAINT_P(INTVAL(op), 'K', \"Ks08\")"))))
  20604. +
  20605. +(define_predicate "avr32_cond_immediate_operand"
  20606. + (and (match_operand 0 "immediate_operand")
  20607. + (match_test "CONST_OK_FOR_CONSTRAINT_P(INTVAL(op), 'I', \"Is08\")")))
  20608. +
  20609. +
  20610. +(define_predicate "avr32_cond_move_operand"
  20611. + (ior (ior (match_operand 0 "register_operand")
  20612. + (and (match_operand 0 "immediate_operand")
  20613. + (match_test "CONST_OK_FOR_CONSTRAINT_P(INTVAL(op), 'K', \"Ks08\")")))
  20614. + (and (match_test "TARGET_V2_INSNS")
  20615. + (match_operand 0 "memory_operand"))))
  20616. +
  20617. +(define_predicate "avr32_mov_immediate_operand"
  20618. + (and (match_operand 0 "immediate_operand")
  20619. + (match_test "avr32_const_ok_for_move(INTVAL(op))")))
  20620. +
  20621. +
  20622. +(define_predicate "avr32_rmw_address_operand"
  20623. + (ior (and (match_code "symbol_ref")
  20624. + (match_test "({rtx symbol = avr32_find_symbol(op); \
  20625. + symbol && (GET_CODE (symbol) == SYMBOL_REF) && SYMBOL_REF_RMW_ADDR(symbol);})"))
  20626. + (and (match_operand 0 "immediate_operand")
  20627. + (match_test "CONST_OK_FOR_CONSTRAINT_P(INTVAL(op), 'K', \"Ks17\")")))
  20628. + {
  20629. + return TARGET_RMW && !flag_pic;
  20630. + }
  20631. +)
  20632. +
  20633. +(define_predicate "avr32_rmw_memory_operand"
  20634. + (and (match_code "mem")
  20635. + (match_test "!volatile_refs_p(op) && (GET_MODE(op) == SImode) &&
  20636. + avr32_rmw_address_operand(XEXP(op, 0), GET_MODE(XEXP(op, 0)))")))
  20637. +
  20638. +(define_predicate "avr32_rmw_memory_or_register_operand"
  20639. + (ior (match_operand 0 "avr32_rmw_memory_operand")
  20640. + (match_operand 0 "register_operand")))
  20641. +
  20642. +(define_predicate "avr32_non_rmw_memory_operand"
  20643. + (and (not (match_operand 0 "avr32_rmw_memory_operand"))
  20644. + (match_operand 0 "memory_operand")))
  20645. +
  20646. +(define_predicate "avr32_non_rmw_general_operand"
  20647. + (and (not (match_operand 0 "avr32_rmw_memory_operand"))
  20648. + (match_operand 0 "general_operand")))
  20649. +
  20650. +(define_predicate "avr32_non_rmw_nonimmediate_operand"
  20651. + (and (not (match_operand 0 "avr32_rmw_memory_operand"))
  20652. + (match_operand 0 "nonimmediate_operand")))
  20653. +
  20654. +;; Return true if the operand is the 1.0f constant.
  20655. +
  20656. +(define_predicate "const_1f_operand"
  20657. + (match_code "const_int,const_double")
  20658. +{
  20659. + return (op == CONST1_RTX (SFmode));
  20660. +})
  20661. diff -Nur gcc-4.4.6.orig/gcc/config/avr32/simd.md gcc-4.4.6/gcc/config/avr32/simd.md
  20662. --- gcc-4.4.6.orig/gcc/config/avr32/simd.md 1970-01-01 01:00:00.000000000 +0100
  20663. +++ gcc-4.4.6/gcc/config/avr32/simd.md 2011-10-22 19:23:08.528581303 +0200
  20664. @@ -0,0 +1,145 @@
  20665. +;; AVR32 machine description file for SIMD instructions.
  20666. +;; Copyright 2003-2006 Atmel Corporation.
  20667. +;;
  20668. +;; Written by Ronny Pedersen, Atmel Norway, <rpedersen@atmel.com>
  20669. +;;
  20670. +;; This file is part of GCC.
  20671. +;;
  20672. +;; This program is free software; you can redistribute it and/or modify
  20673. +;; it under the terms of the GNU General Public License as published by
  20674. +;; the Free Software Foundation; either version 2 of the License, or
  20675. +;; (at your option) any later version.
  20676. +;;
  20677. +;; This program is distributed in the hope that it will be useful,
  20678. +;; but WITHOUT ANY WARRANTY; without even the implied warranty of
  20679. +;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  20680. +;; GNU General Public License for more details.
  20681. +;;
  20682. +;; You should have received a copy of the GNU General Public License
  20683. +;; along with this program; if not, write to the Free Software
  20684. +;; Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
  20685. +
  20686. +;; -*- Mode: Scheme -*-
  20687. +
  20688. +
  20689. +;; Vector modes
  20690. +(define_mode_iterator VECM [V2HI V4QI])
  20691. +(define_mode_attr size [(V2HI "h") (V4QI "b")])
  20692. +
  20693. +(define_insn "add<mode>3"
  20694. + [(set (match_operand:VECM 0 "register_operand" "=r")
  20695. + (plus:VECM (match_operand:VECM 1 "register_operand" "r")
  20696. + (match_operand:VECM 2 "register_operand" "r")))]
  20697. + "TARGET_SIMD"
  20698. + "padd.<size>\t%0, %1, %2"
  20699. + [(set_attr "length" "4")
  20700. + (set_attr "type" "alu")])
  20701. +
  20702. +
  20703. +(define_insn "sub<mode>3"
  20704. + [(set (match_operand:VECM 0 "register_operand" "=r")
  20705. + (minus:VECM (match_operand:VECM 1 "register_operand" "r")
  20706. + (match_operand:VECM 2 "register_operand" "r")))]
  20707. + "TARGET_SIMD"
  20708. + "psub.<size>\t%0, %1, %2"
  20709. + [(set_attr "length" "4")
  20710. + (set_attr "type" "alu")])
  20711. +
  20712. +
  20713. +(define_insn "abs<mode>2"
  20714. + [(set (match_operand:VECM 0 "register_operand" "=r")
  20715. + (abs:VECM (match_operand:VECM 1 "register_operand" "r")))]
  20716. + "TARGET_SIMD"
  20717. + "pabs.s<size>\t%0, %1"
  20718. + [(set_attr "length" "4")
  20719. + (set_attr "type" "alu")])
  20720. +
  20721. +(define_insn "ashl<mode>3"
  20722. + [(set (match_operand:VECM 0 "register_operand" "=r")
  20723. + (ashift:VECM (match_operand:VECM 1 "register_operand" "r")
  20724. + (match_operand:SI 2 "immediate_operand" "Ku04")))]
  20725. + "TARGET_SIMD"
  20726. + "plsl.<size>\t%0, %1, %2"
  20727. + [(set_attr "length" "4")
  20728. + (set_attr "type" "alu")])
  20729. +
  20730. +(define_insn "ashr<mode>3"
  20731. + [(set (match_operand:VECM 0 "register_operand" "=r")
  20732. + (ashiftrt:VECM (match_operand:VECM 1 "register_operand" "r")
  20733. + (match_operand:SI 2 "immediate_operand" "Ku04")))]
  20734. + "TARGET_SIMD"
  20735. + "pasr.<size>\t%0, %1, %2"
  20736. + [(set_attr "length" "4")
  20737. + (set_attr "type" "alu")])
  20738. +
  20739. +(define_insn "lshr<mode>3"
  20740. + [(set (match_operand:VECM 0 "register_operand" "=r")
  20741. + (lshiftrt:VECM (match_operand:VECM 1 "register_operand" "r")
  20742. + (match_operand:SI 2 "immediate_operand" "Ku04")))]
  20743. + "TARGET_SIMD"
  20744. + "plsr.<size>\t%0, %1, %2"
  20745. + [(set_attr "length" "4")
  20746. + (set_attr "type" "alu")])
  20747. +
  20748. +(define_insn "smaxv2hi3"
  20749. + [(set (match_operand:V2HI 0 "register_operand" "=r")
  20750. + (smax:V2HI (match_operand:V2HI 1 "register_operand" "r")
  20751. + (match_operand:V2HI 2 "register_operand" "r")))]
  20752. +
  20753. + "TARGET_SIMD"
  20754. + "pmax.sh\t%0, %1, %2"
  20755. + [(set_attr "length" "4")
  20756. + (set_attr "type" "alu")])
  20757. +
  20758. +(define_insn "sminv2hi3"
  20759. + [(set (match_operand:V2HI 0 "register_operand" "=r")
  20760. + (smin:V2HI (match_operand:V2HI 1 "register_operand" "r")
  20761. + (match_operand:V2HI 2 "register_operand" "r")))]
  20762. +
  20763. + "TARGET_SIMD"
  20764. + "pmin.sh\t%0, %1, %2"
  20765. + [(set_attr "length" "4")
  20766. + (set_attr "type" "alu")])
  20767. +
  20768. +(define_insn "umaxv4qi3"
  20769. + [(set (match_operand:V4QI 0 "register_operand" "=r")
  20770. + (umax:V4QI (match_operand:V4QI 1 "register_operand" "r")
  20771. + (match_operand:V4QI 2 "register_operand" "r")))]
  20772. +
  20773. + "TARGET_SIMD"
  20774. + "pmax.ub\t%0, %1, %2"
  20775. + [(set_attr "length" "4")
  20776. + (set_attr "type" "alu")])
  20777. +
  20778. +(define_insn "uminv4qi3"
  20779. + [(set (match_operand:V4QI 0 "register_operand" "=r")
  20780. + (umin:V4QI (match_operand:V4QI 1 "register_operand" "r")
  20781. + (match_operand:V4QI 2 "register_operand" "r")))]
  20782. +
  20783. + "TARGET_SIMD"
  20784. + "pmin.ub\t%0, %1, %2"
  20785. + [(set_attr "length" "4")
  20786. + (set_attr "type" "alu")])
  20787. +
  20788. +
  20789. +(define_insn "addsubv2hi"
  20790. + [(set (match_operand:V2HI 0 "register_operand" "=r")
  20791. + (vec_concat:V2HI
  20792. + (plus:HI (match_operand:HI 1 "register_operand" "r")
  20793. + (match_operand:HI 2 "register_operand" "r"))
  20794. + (minus:HI (match_dup 1) (match_dup 2))))]
  20795. + "TARGET_SIMD"
  20796. + "paddsub.h\t%0, %1:b, %2:b"
  20797. + [(set_attr "length" "4")
  20798. + (set_attr "type" "alu")])
  20799. +
  20800. +(define_insn "subaddv2hi"
  20801. + [(set (match_operand:V2HI 0 "register_operand" "=r")
  20802. + (vec_concat:V2HI
  20803. + (minus:HI (match_operand:HI 1 "register_operand" "r")
  20804. + (match_operand:HI 2 "register_operand" "r"))
  20805. + (plus:HI (match_dup 1) (match_dup 2))))]
  20806. + "TARGET_SIMD"
  20807. + "psubadd.h\t%0, %1:b, %2:b"
  20808. + [(set_attr "length" "4")
  20809. + (set_attr "type" "alu")])
  20810. diff -Nur gcc-4.4.6.orig/gcc/config/avr32/sync.md gcc-4.4.6/gcc/config/avr32/sync.md
  20811. --- gcc-4.4.6.orig/gcc/config/avr32/sync.md 1970-01-01 01:00:00.000000000 +0100
  20812. +++ gcc-4.4.6/gcc/config/avr32/sync.md 2011-10-22 19:23:08.528581303 +0200
  20813. @@ -0,0 +1,244 @@
  20814. +;;=================================================================
  20815. +;; Atomic operations
  20816. +;;=================================================================
  20817. +
  20818. +
  20819. +(define_insn "sync_compare_and_swapsi"
  20820. + [(set (match_operand:SI 0 "register_operand" "=&r,&r")
  20821. + (match_operand:SI 1 "memory_operand" "+RKs16,+RKs16"))
  20822. + (set (match_dup 1)
  20823. + (unspec_volatile:SI
  20824. + [(match_dup 1)
  20825. + (match_operand:SI 2 "register_immediate_operand" "r,Ks21")
  20826. + (match_operand:SI 3 "register_operand" "r,r")]
  20827. + VUNSPEC_SYNC_CMPXCHG)) ]
  20828. + ""
  20829. + "0:
  20830. + ssrf\t5
  20831. + ld.w\t%0,%1
  20832. + cp.w\t%0,%2
  20833. + brne\t0f
  20834. + stcond\t%1, %3
  20835. + brne\t0b
  20836. + 0:
  20837. + "
  20838. + [(set_attr "length" "16,18")
  20839. + (set_attr "cc" "clobber")]
  20840. + )
  20841. +
  20842. +
  20843. +(define_code_iterator atomic_op [plus minus and ior xor])
  20844. +(define_code_attr atomic_asm_insn [(plus "add") (minus "sub") (and "and") (ior "or") (xor "eor")])
  20845. +(define_code_attr atomic_insn [(plus "add") (minus "sub") (and "and") (ior "ior") (xor "xor")])
  20846. +
  20847. +(define_insn "sync_loadsi"
  20848. + ; NB! Put an early clobber on the destination operand to
  20849. + ; avoid gcc using the same register in the source and
  20850. + ; destination. This is done in order to avoid gcc to
  20851. + ; clobber the source operand since these instructions
  20852. + ; are actually inside a "loop".
  20853. + [(set (match_operand:SI 0 "register_operand" "=&r")
  20854. + (unspec_volatile:SI
  20855. + [(match_operand:SI 1 "avr32_ks16_memory_operand" "RKs16")
  20856. + (label_ref (match_operand 2 "" ""))]
  20857. + VUNSPEC_SYNC_SET_LOCK_AND_LOAD) )]
  20858. + ""
  20859. + "%2:
  20860. + ssrf\t5
  20861. + ld.w\t%0,%1"
  20862. + [(set_attr "length" "6")
  20863. + (set_attr "cc" "clobber")]
  20864. + )
  20865. +
  20866. +(define_insn "sync_store_if_lock"
  20867. + [(set (match_operand:SI 0 "avr32_ks16_memory_operand" "=RKs16")
  20868. + (unspec_volatile:SI
  20869. + [(match_operand:SI 1 "register_operand" "r")
  20870. + (label_ref (match_operand 2 "" ""))]
  20871. + VUNSPEC_SYNC_STORE_IF_LOCK) )]
  20872. + ""
  20873. + "stcond\t%0, %1
  20874. + brne\t%2"
  20875. + [(set_attr "length" "6")
  20876. + (set_attr "cc" "clobber")]
  20877. + )
  20878. +
  20879. +
  20880. +(define_expand "sync_<atomic_insn>si"
  20881. + [(set (match_dup 2)
  20882. + (unspec_volatile:SI
  20883. + [(match_operand:SI 0 "avr32_ks16_memory_operand" "")
  20884. + (match_dup 3)]
  20885. + VUNSPEC_SYNC_SET_LOCK_AND_LOAD))
  20886. + (set (match_dup 2)
  20887. + (atomic_op:SI (match_dup 2)
  20888. + (match_operand:SI 1 "register_immediate_operand" "")))
  20889. + (set (match_dup 0)
  20890. + (unspec_volatile:SI
  20891. + [(match_dup 2)
  20892. + (match_dup 3)]
  20893. + VUNSPEC_SYNC_STORE_IF_LOCK) )
  20894. + (use (match_dup 1))
  20895. + (use (match_dup 4))]
  20896. + ""
  20897. + {
  20898. + rtx *mem_expr = &operands[0];
  20899. + rtx ptr_reg;
  20900. + if ( !avr32_ks16_memory_operand (*mem_expr, GET_MODE (*mem_expr)) )
  20901. + {
  20902. + ptr_reg = force_reg (Pmode, XEXP (*mem_expr, 0));
  20903. + XEXP (*mem_expr, 0) = ptr_reg;
  20904. + }
  20905. + else
  20906. + {
  20907. + rtx address = XEXP (*mem_expr, 0);
  20908. + if ( REG_P (address) )
  20909. + ptr_reg = address;
  20910. + else if ( REG_P (XEXP (address, 0)) )
  20911. + ptr_reg = XEXP (address, 0);
  20912. + else
  20913. + ptr_reg = XEXP (address, 1);
  20914. + }
  20915. +
  20916. + operands[2] = gen_reg_rtx (SImode);
  20917. + operands[3] = gen_rtx_LABEL_REF(Pmode, gen_label_rtx ());
  20918. + operands[4] = ptr_reg;
  20919. +
  20920. + }
  20921. + )
  20922. +
  20923. +
  20924. +
  20925. +(define_expand "sync_old_<atomic_insn>si"
  20926. + [(set (match_operand:SI 0 "register_operand" "")
  20927. + (unspec_volatile:SI
  20928. + [(match_operand:SI 1 "avr32_ks16_memory_operand" "")
  20929. + (match_dup 4)]
  20930. + VUNSPEC_SYNC_SET_LOCK_AND_LOAD))
  20931. + (set (match_dup 3)
  20932. + (atomic_op:SI (match_dup 0)
  20933. + (match_operand:SI 2 "register_immediate_operand" "")))
  20934. + (set (match_dup 1)
  20935. + (unspec_volatile:SI
  20936. + [(match_dup 3)
  20937. + (match_dup 4)]
  20938. + VUNSPEC_SYNC_STORE_IF_LOCK) )
  20939. + (use (match_dup 2))
  20940. + (use (match_dup 5))]
  20941. + ""
  20942. + {
  20943. + rtx *mem_expr = &operands[1];
  20944. + rtx ptr_reg;
  20945. + if ( !avr32_ks16_memory_operand (*mem_expr, GET_MODE (*mem_expr)) )
  20946. + {
  20947. + ptr_reg = force_reg (Pmode, XEXP (*mem_expr, 0));
  20948. + XEXP (*mem_expr, 0) = ptr_reg;
  20949. + }
  20950. + else
  20951. + {
  20952. + rtx address = XEXP (*mem_expr, 0);
  20953. + if ( REG_P (address) )
  20954. + ptr_reg = address;
  20955. + else if ( REG_P (XEXP (address, 0)) )
  20956. + ptr_reg = XEXP (address, 0);
  20957. + else
  20958. + ptr_reg = XEXP (address, 1);
  20959. + }
  20960. +
  20961. + operands[3] = gen_reg_rtx (SImode);
  20962. + operands[4] = gen_rtx_LABEL_REF(Pmode, gen_label_rtx ());
  20963. + operands[5] = ptr_reg;
  20964. + }
  20965. + )
  20966. +
  20967. +(define_expand "sync_new_<atomic_insn>si"
  20968. + [(set (match_operand:SI 0 "register_operand" "")
  20969. + (unspec_volatile:SI
  20970. + [(match_operand:SI 1 "avr32_ks16_memory_operand" "")
  20971. + (match_dup 3)]
  20972. + VUNSPEC_SYNC_SET_LOCK_AND_LOAD))
  20973. + (set (match_dup 0)
  20974. + (atomic_op:SI (match_dup 0)
  20975. + (match_operand:SI 2 "register_immediate_operand" "")))
  20976. + (set (match_dup 1)
  20977. + (unspec_volatile:SI
  20978. + [(match_dup 0)
  20979. + (match_dup 3)]
  20980. + VUNSPEC_SYNC_STORE_IF_LOCK) )
  20981. + (use (match_dup 2))
  20982. + (use (match_dup 4))]
  20983. + ""
  20984. + {
  20985. + rtx *mem_expr = &operands[1];
  20986. + rtx ptr_reg;
  20987. + if ( !avr32_ks16_memory_operand (*mem_expr, GET_MODE (*mem_expr)) )
  20988. + {
  20989. + ptr_reg = force_reg (Pmode, XEXP (*mem_expr, 0));
  20990. + XEXP (*mem_expr, 0) = ptr_reg;
  20991. + }
  20992. + else
  20993. + {
  20994. + rtx address = XEXP (*mem_expr, 0);
  20995. + if ( REG_P (address) )
  20996. + ptr_reg = address;
  20997. + else if ( REG_P (XEXP (address, 0)) )
  20998. + ptr_reg = XEXP (address, 0);
  20999. + else
  21000. + ptr_reg = XEXP (address, 1);
  21001. + }
  21002. +
  21003. + operands[3] = gen_rtx_LABEL_REF(Pmode, gen_label_rtx ());
  21004. + operands[4] = ptr_reg;
  21005. + }
  21006. + )
  21007. +
  21008. +
  21009. +;(define_insn "sync_<atomic_insn>si"
  21010. +; [(set (match_operand:SI 0 "memory_operand" "+RKs16")
  21011. +; (unspec_volatile:SI
  21012. +; [(atomic_op:SI (match_dup 0)
  21013. +; (match_operand:SI 1 "register_operand" "r"))]
  21014. +; VUNSPEC_SYNC_CMPXCHG))
  21015. +; (clobber (match_scratch:SI 2 "=&r"))]
  21016. +; ""
  21017. +; "0:
  21018. +; ssrf\t5
  21019. +; ld.w\t%2,%0
  21020. +; <atomic_asm_insn>\t%2,%1
  21021. +; stcond\t%0, %2
  21022. +; brne\t0b
  21023. +; "
  21024. +; [(set_attr "length" "14")
  21025. +; (set_attr "cc" "clobber")]
  21026. +; )
  21027. +;
  21028. +;(define_insn "sync_new_<atomic_insn>si"
  21029. +; [(set (match_operand:SI 1 "memory_operand" "+RKs16")
  21030. +; (unspec_volatile:SI
  21031. +; [(atomic_op:SI (match_dup 1)
  21032. +; (match_operand:SI 2 "register_operand" "r"))]
  21033. +; VUNSPEC_SYNC_CMPXCHG))
  21034. +; (set (match_operand:SI 0 "register_operand" "=&r")
  21035. +; (atomic_op:SI (match_dup 1)
  21036. +; (match_dup 2)))]
  21037. +; ""
  21038. +; "0:
  21039. +; ssrf\t5
  21040. +; ld.w\t%0,%1
  21041. +; <atomic_asm_insn>\t%0,%2
  21042. +; stcond\t%1, %0
  21043. +; brne\t0b
  21044. +; "
  21045. +; [(set_attr "length" "14")
  21046. +; (set_attr "cc" "clobber")]
  21047. +; )
  21048. +
  21049. +(define_insn "sync_lock_test_and_setsi"
  21050. + [ (set (match_operand:SI 0 "register_operand" "=&r")
  21051. + (match_operand:SI 1 "memory_operand" "+RKu00"))
  21052. + (set (match_dup 1)
  21053. + (match_operand:SI 2 "register_operand" "r")) ]
  21054. + ""
  21055. + "xchg\t%0, %p1, %2"
  21056. + [(set_attr "length" "4")]
  21057. + )
  21058. diff -Nur gcc-4.4.6.orig/gcc/config/avr32/t-avr32 gcc-4.4.6/gcc/config/avr32/t-avr32
  21059. --- gcc-4.4.6.orig/gcc/config/avr32/t-avr32 1970-01-01 01:00:00.000000000 +0100
  21060. +++ gcc-4.4.6/gcc/config/avr32/t-avr32 2011-10-22 19:23:08.528581303 +0200
  21061. @@ -0,0 +1,118 @@
  21062. +
  21063. +MD_INCLUDES= $(srcdir)/config/avr32/avr32.md \
  21064. + $(srcdir)/config/avr32/sync.md \
  21065. + $(srcdir)/config/avr32/simd.md \
  21066. + $(srcdir)/config/avr32/predicates.md
  21067. +
  21068. +s-config s-conditions s-flags s-codes s-constants s-emit s-recog s-preds \
  21069. + s-opinit s-extract s-peep s-attr s-attrtab s-output: $(MD_INCLUDES)
  21070. +
  21071. +# We want fine grained libraries, so use the new code
  21072. +# to build the floating point emulation libraries.
  21073. +FPBIT = fp-bit.c
  21074. +DPBIT = dp-bit.c
  21075. +
  21076. +LIB1ASMSRC = avr32/lib1funcs.S
  21077. +LIB1ASMFUNCS = _avr32_f64_mul _avr32_f64_mul_fast _avr32_f64_addsub _avr32_f64_addsub_fast _avr32_f64_to_u32 \
  21078. + _avr32_f64_to_s32 _avr32_f64_to_u64 _avr32_f64_to_s64 _avr32_u32_to_f64 \
  21079. + _avr32_s32_to_f64 _avr32_f64_cmp_eq _avr32_f64_cmp_ge _avr32_f64_cmp_lt \
  21080. + _avr32_f32_cmp_eq _avr32_f32_cmp_ge _avr32_f32_cmp_lt _avr32_f64_div _avr32_f64_div_fast \
  21081. + _avr32_f32_div _avr32_f32_div_fast _avr32_f32_addsub _avr32_f32_addsub_fast \
  21082. + _avr32_f32_mul _avr32_s32_to_f32 _avr32_u32_to_f32 _avr32_f32_to_s32 \
  21083. + _avr32_f32_to_u32 _avr32_f32_to_f64 _avr32_f64_to_f32 _mulsi3
  21084. +
  21085. +#LIB2FUNCS_EXTRA += $(srcdir)/config/avr32/lib2funcs.S
  21086. +
  21087. +MULTILIB_OPTIONS = march=ap/march=ucr1/march=ucr2/march=ucr2nomul/march=ucr3/march=ucr3fp
  21088. +MULTILIB_DIRNAMES = ap ucr1 ucr2 ucr2nomul ucr3 ucr3fp
  21089. +MULTILIB_EXCEPTIONS =
  21090. +MULTILIB_MATCHES += march?ap=mpart?ap7000
  21091. +MULTILIB_MATCHES += march?ap=mpart?ap7001
  21092. +MULTILIB_MATCHES += march?ap=mpart?ap7002
  21093. +MULTILIB_MATCHES += march?ap=mpart?ap7200
  21094. +MULTILIB_MATCHES += march?ucr1=march?uc
  21095. +MULTILIB_MATCHES += march?ucr1=mpart?uc3a0512es
  21096. +MULTILIB_MATCHES += march?ucr2=mpart?uc3a0128
  21097. +MULTILIB_MATCHES += march?ucr2=mpart?uc3a0256
  21098. +MULTILIB_MATCHES += march?ucr2=mpart?uc3a0512
  21099. +MULTILIB_MATCHES += march?ucr2=mpart?uc3a1128
  21100. +MULTILIB_MATCHES += march?ucr2=mpart?uc3a1256
  21101. +MULTILIB_MATCHES += march?ucr1=mpart?uc3a1512es
  21102. +MULTILIB_MATCHES += march?ucr2=mpart?uc3a1512
  21103. +MULTILIB_MATCHES += march?ucr2nomul=mpart?uc3a3revd
  21104. +MULTILIB_MATCHES += march?ucr2=mpart?uc3a364
  21105. +MULTILIB_MATCHES += march?ucr2=mpart?uc3a364s
  21106. +MULTILIB_MATCHES += march?ucr2=mpart?uc3a3128
  21107. +MULTILIB_MATCHES += march?ucr2=mpart?uc3a3128s
  21108. +MULTILIB_MATCHES += march?ucr2=mpart?uc3a3256
  21109. +MULTILIB_MATCHES += march?ucr2=mpart?uc3a3256s
  21110. +MULTILIB_MATCHES += march?ucr2=mpart?uc3a464
  21111. +MULTILIB_MATCHES += march?ucr2=mpart?uc3a464s
  21112. +MULTILIB_MATCHES += march?ucr2=mpart?uc3a4128
  21113. +MULTILIB_MATCHES += march?ucr2=mpart?uc3a4128s
  21114. +MULTILIB_MATCHES += march?ucr2=mpart?uc3a4256
  21115. +MULTILIB_MATCHES += march?ucr2=mpart?uc3a4256s
  21116. +MULTILIB_MATCHES += march?ucr1=mpart?uc3b064
  21117. +MULTILIB_MATCHES += march?ucr1=mpart?uc3b0128
  21118. +MULTILIB_MATCHES += march?ucr1=mpart?uc3b0256es
  21119. +MULTILIB_MATCHES += march?ucr1=mpart?uc3b0256
  21120. +MULTILIB_MATCHES += march?ucr2=mpart?uc3b0512
  21121. +MULTILIB_MATCHES += march?ucr2=mpart?uc3b0512revc
  21122. +MULTILIB_MATCHES += march?ucr1=mpart?uc3b164
  21123. +MULTILIB_MATCHES += march?ucr1=mpart?uc3b1128
  21124. +MULTILIB_MATCHES += march?ucr1=mpart?uc3b1256es
  21125. +MULTILIB_MATCHES += march?ucr1=mpart?uc3b1256
  21126. +MULTILIB_MATCHES += march?ucr2=mpart?uc3b1512
  21127. +MULTILIB_MATCHES += march?ucr2=mpart?uc3b1512revc
  21128. +MULTILIB_MATCHES += march?ucr3=mpart?uc64d3
  21129. +MULTILIB_MATCHES += march?ucr3=mpart?uc128d3
  21130. +MULTILIB_MATCHES += march?ucr3=mpart?uc64d4
  21131. +MULTILIB_MATCHES += march?ucr3=mpart?uc128d4
  21132. +MULTILIB_MATCHES += march?ucr3=mpart?uc3c0512crevc
  21133. +MULTILIB_MATCHES += march?ucr3=mpart?uc3c1512crevc
  21134. +MULTILIB_MATCHES += march?ucr3=mpart?uc3c2512crevc
  21135. +MULTILIB_MATCHES += march?ucr3=mpart?uc3l0256
  21136. +MULTILIB_MATCHES += march?ucr3=mpart?uc3l0128
  21137. +MULTILIB_MATCHES += march?ucr3=mpart?uc3l064
  21138. +MULTILIB_MATCHES += march?ucr3=mpart?uc3l032
  21139. +MULTILIB_MATCHES += march?ucr3=mpart?uc3l016
  21140. +MULTILIB_MATCHES += march?ucr3=mpart?uc3l064revb
  21141. +MULTILIB_MATCHES += march?ucr3=mpart?uc64l3u
  21142. +MULTILIB_MATCHES += march?ucr3=mpart?uc128l3u
  21143. +MULTILIB_MATCHES += march?ucr3=mpart?uc256l3u
  21144. +MULTILIB_MATCHES += march?ucr3=mpart?uc64l4u
  21145. +MULTILIB_MATCHES += march?ucr3=mpart?uc128l4u
  21146. +MULTILIB_MATCHES += march?ucr3=mpart?uc256l4u
  21147. +MULTILIB_MATCHES += march?ucr3fp=mpart?uc3c064c
  21148. +MULTILIB_MATCHES += march?ucr3fp=mpart?uc3c0128c
  21149. +MULTILIB_MATCHES += march?ucr3fp=mpart?uc3c0256c
  21150. +MULTILIB_MATCHES += march?ucr3fp=mpart?uc3c0512c
  21151. +MULTILIB_MATCHES += march?ucr3fp=mpart?uc3c164c
  21152. +MULTILIB_MATCHES += march?ucr3fp=mpart?uc3c1128c
  21153. +MULTILIB_MATCHES += march?ucr3fp=mpart?uc3c1256c
  21154. +MULTILIB_MATCHES += march?ucr3fp=mpart?uc3c1512c
  21155. +MULTILIB_MATCHES += march?ucr3fp=mpart?uc3c264c
  21156. +MULTILIB_MATCHES += march?ucr3fp=mpart?uc3c2128c
  21157. +MULTILIB_MATCHES += march?ucr3fp=mpart?uc3c2256c
  21158. +MULTILIB_MATCHES += march?ucr3fp=mpart?uc3c2512c
  21159. +MULTILIB_MATCHES += march?ucr3=mpart?mxt768e
  21160. +
  21161. +
  21162. +EXTRA_MULTILIB_PARTS = crtbegin.o crtbeginS.o crtend.o crtendS.o crti.o crtn.o
  21163. +
  21164. +CRTSTUFF_T_CFLAGS = -mrelax
  21165. +CRTSTUFF_T_CFLAGS_S = -mrelax -fPIC
  21166. +TARGET_LIBGCC2_CFLAGS += -mrelax
  21167. +
  21168. +LIBGCC = stmp-multilib
  21169. +INSTALL_LIBGCC = install-multilib
  21170. +
  21171. +fp-bit.c: $(srcdir)/config/fp-bit.c
  21172. + echo '#define FLOAT' > fp-bit.c
  21173. + cat $(srcdir)/config/fp-bit.c >> fp-bit.c
  21174. +
  21175. +dp-bit.c: $(srcdir)/config/fp-bit.c
  21176. + cat $(srcdir)/config/fp-bit.c > dp-bit.c
  21177. +
  21178. +
  21179. +
  21180. diff -Nur gcc-4.4.6.orig/gcc/config/avr32/t-avr32-linux gcc-4.4.6/gcc/config/avr32/t-avr32-linux
  21181. --- gcc-4.4.6.orig/gcc/config/avr32/t-avr32-linux 1970-01-01 01:00:00.000000000 +0100
  21182. +++ gcc-4.4.6/gcc/config/avr32/t-avr32-linux 2011-10-22 19:23:08.528581303 +0200
  21183. @@ -0,0 +1,118 @@
  21184. +
  21185. +MD_INCLUDES= $(srcdir)/config/avr32/avr32.md \
  21186. + $(srcdir)/config/avr32/sync.md \
  21187. + $(srcdir)/config/avr32/simd.md \
  21188. + $(srcdir)/config/avr32/predicates.md
  21189. +
  21190. +s-config s-conditions s-flags s-codes s-constants s-emit s-recog s-preds \
  21191. + s-opinit s-extract s-peep s-attr s-attrtab s-output: $(MD_INCLUDES)
  21192. +
  21193. +# We want fine grained libraries, so use the new code
  21194. +# to build the floating point emulation libraries.
  21195. +FPBIT = fp-bit.c
  21196. +DPBIT = dp-bit.c
  21197. +
  21198. +LIB1ASMSRC = avr32/lib1funcs.S
  21199. +LIB1ASMFUNCS = _avr32_f64_mul _avr32_f64_mul_fast _avr32_f64_addsub _avr32_f64_addsub_fast _avr32_f64_to_u32 \
  21200. + _avr32_f64_to_s32 _avr32_f64_to_u64 _avr32_f64_to_s64 _avr32_u32_to_f64 \
  21201. + _avr32_s32_to_f64 _avr32_f64_cmp_eq _avr32_f64_cmp_ge _avr32_f64_cmp_lt \
  21202. + _avr32_f32_cmp_eq _avr32_f32_cmp_ge _avr32_f32_cmp_lt _avr32_f64_div _avr32_f64_div_fast \
  21203. + _avr32_f32_div _avr32_f32_div_fast _avr32_f32_addsub _avr32_f32_addsub_fast \
  21204. + _avr32_f32_mul _avr32_s32_to_f32 _avr32_u32_to_f32 _avr32_f32_to_s32 \
  21205. + _avr32_f32_to_u32 _avr32_f32_to_f64 _avr32_f64_to_f32 _mulsi3
  21206. +
  21207. +#LIB2FUNCS_EXTRA += $(srcdir)/config/avr32/lib2funcs.S
  21208. +
  21209. +MULTILIB_OPTIONS = march=ap/march=ucr1/march=ucr2/march=ucr2nomul/march=ucr3/march=ucr3fp
  21210. +MULTILIB_DIRNAMES = ap ucr1 ucr2 ucr2nomul ucr3 ucr3fp
  21211. +MULTILIB_EXCEPTIONS =
  21212. +MULTILIB_MATCHES += march?ap=mpart?ap7000
  21213. +MULTILIB_MATCHES += march?ap=mpart?ap7001
  21214. +MULTILIB_MATCHES += march?ap=mpart?ap7002
  21215. +MULTILIB_MATCHES += march?ap=mpart?ap7200
  21216. +MULTILIB_MATCHES += march?ucr1=march?uc
  21217. +MULTILIB_MATCHES += march?ucr1=mpart?uc3a0512es
  21218. +MULTILIB_MATCHES += march?ucr2=mpart?uc3a0128
  21219. +MULTILIB_MATCHES += march?ucr2=mpart?uc3a0256
  21220. +MULTILIB_MATCHES += march?ucr2=mpart?uc3a0512
  21221. +MULTILIB_MATCHES += march?ucr2=mpart?uc3a1128
  21222. +MULTILIB_MATCHES += march?ucr2=mpart?uc3a1256
  21223. +MULTILIB_MATCHES += march?ucr1=mpart?uc3a1512es
  21224. +MULTILIB_MATCHES += march?ucr2=mpart?uc3a1512
  21225. +MULTILIB_MATCHES += march?ucr2nomul=mpart?uc3a3revd
  21226. +MULTILIB_MATCHES += march?ucr2=mpart?uc3a364
  21227. +MULTILIB_MATCHES += march?ucr2=mpart?uc3a364s
  21228. +MULTILIB_MATCHES += march?ucr2=mpart?uc3a3128
  21229. +MULTILIB_MATCHES += march?ucr2=mpart?uc3a3128s
  21230. +MULTILIB_MATCHES += march?ucr2=mpart?uc3a3256
  21231. +MULTILIB_MATCHES += march?ucr2=mpart?uc3a3256s
  21232. +MULTILIB_MATCHES += march?ucr2=mpart?uc3a464
  21233. +MULTILIB_MATCHES += march?ucr2=mpart?uc3a464s
  21234. +MULTILIB_MATCHES += march?ucr2=mpart?uc3a4128
  21235. +MULTILIB_MATCHES += march?ucr2=mpart?uc3a4128s
  21236. +MULTILIB_MATCHES += march?ucr2=mpart?uc3a4256
  21237. +MULTILIB_MATCHES += march?ucr2=mpart?uc3a4256s
  21238. +MULTILIB_MATCHES += march?ucr1=mpart?uc3b064
  21239. +MULTILIB_MATCHES += march?ucr1=mpart?uc3b0128
  21240. +MULTILIB_MATCHES += march?ucr1=mpart?uc3b0256es
  21241. +MULTILIB_MATCHES += march?ucr1=mpart?uc3b0256
  21242. +MULTILIB_MATCHES += march?ucr2=mpart?uc3b0512
  21243. +MULTILIB_MATCHES += march?ucr2=mpart?uc3b0512revc
  21244. +MULTILIB_MATCHES += march?ucr1=mpart?uc3b164
  21245. +MULTILIB_MATCHES += march?ucr1=mpart?uc3b1128
  21246. +MULTILIB_MATCHES += march?ucr1=mpart?uc3b1256es
  21247. +MULTILIB_MATCHES += march?ucr1=mpart?uc3b1256
  21248. +MULTILIB_MATCHES += march?ucr2=mpart?uc3b1512
  21249. +MULTILIB_MATCHES += march?ucr2=mpart?uc3b1512revc
  21250. +MULTILIB_MATCHES += march?ucr3=mpart?uc64d3
  21251. +MULTILIB_MATCHES += march?ucr3=mpart?uc128d3
  21252. +MULTILIB_MATCHES += march?ucr3=mpart?uc64d4
  21253. +MULTILIB_MATCHES += march?ucr3=mpart?uc128d4
  21254. +MULTILIB_MATCHES += march?ucr3=mpart?uc3c0512crevc
  21255. +MULTILIB_MATCHES += march?ucr3=mpart?uc3c1512crevc
  21256. +MULTILIB_MATCHES += march?ucr3=mpart?uc3c2512crevc
  21257. +MULTILIB_MATCHES += march?ucr3=mpart?uc3l0256
  21258. +MULTILIB_MATCHES += march?ucr3=mpart?uc3l0128
  21259. +MULTILIB_MATCHES += march?ucr3=mpart?uc3l064
  21260. +MULTILIB_MATCHES += march?ucr3=mpart?uc3l032
  21261. +MULTILIB_MATCHES += march?ucr3=mpart?uc3l016
  21262. +MULTILIB_MATCHES += march?ucr3=mpart?uc3l064revb
  21263. +MULTILIB_MATCHES += march?ucr3=mpart?uc64l3u
  21264. +MULTILIB_MATCHES += march?ucr3=mpart?uc128l3u
  21265. +MULTILIB_MATCHES += march?ucr3=mpart?uc256l3u
  21266. +MULTILIB_MATCHES += march?ucr3=mpart?uc64l4u
  21267. +MULTILIB_MATCHES += march?ucr3=mpart?uc128l4u
  21268. +MULTILIB_MATCHES += march?ucr3=mpart?uc256l4u
  21269. +MULTILIB_MATCHES += march?ucr3fp=mpart?uc3c064c
  21270. +MULTILIB_MATCHES += march?ucr3fp=mpart?uc3c0128c
  21271. +MULTILIB_MATCHES += march?ucr3fp=mpart?uc3c0256c
  21272. +MULTILIB_MATCHES += march?ucr3fp=mpart?uc3c0512c
  21273. +MULTILIB_MATCHES += march?ucr3fp=mpart?uc3c164c
  21274. +MULTILIB_MATCHES += march?ucr3fp=mpart?uc3c1128c
  21275. +MULTILIB_MATCHES += march?ucr3fp=mpart?uc3c1256c
  21276. +MULTILIB_MATCHES += march?ucr3fp=mpart?uc3c1512c
  21277. +MULTILIB_MATCHES += march?ucr3fp=mpart?uc3c264c
  21278. +MULTILIB_MATCHES += march?ucr3fp=mpart?uc3c2128c
  21279. +MULTILIB_MATCHES += march?ucr3fp=mpart?uc3c2256c
  21280. +MULTILIB_MATCHES += march?ucr3fp=mpart?uc3c2512c
  21281. +MULTILIB_MATCHES += march?ucr3=mpart?mxt768e
  21282. +
  21283. +
  21284. +EXTRA_MULTILIB_PARTS = crtbegin.o crtbeginS.o crtend.o crtendS.o
  21285. +
  21286. +CRTSTUFF_T_CFLAGS = -mrelax
  21287. +CRTSTUFF_T_CFLAGS_S = -mrelax -fPIC
  21288. +TARGET_LIBGCC2_CFLAGS += -mrelax
  21289. +
  21290. +LIBGCC = stmp-multilib
  21291. +INSTALL_LIBGCC = install-multilib
  21292. +
  21293. +fp-bit.c: $(srcdir)/config/fp-bit.c
  21294. + echo '#define FLOAT' > fp-bit.c
  21295. + cat $(srcdir)/config/fp-bit.c >> fp-bit.c
  21296. +
  21297. +dp-bit.c: $(srcdir)/config/fp-bit.c
  21298. + cat $(srcdir)/config/fp-bit.c > dp-bit.c
  21299. +
  21300. +
  21301. +
  21302. diff -Nur gcc-4.4.6.orig/gcc/config/avr32/t-elf gcc-4.4.6/gcc/config/avr32/t-elf
  21303. --- gcc-4.4.6.orig/gcc/config/avr32/t-elf 1970-01-01 01:00:00.000000000 +0100
  21304. +++ gcc-4.4.6/gcc/config/avr32/t-elf 2011-10-22 19:23:08.528581303 +0200
  21305. @@ -0,0 +1,16 @@
  21306. +
  21307. +# Assemble startup files.
  21308. +$(T)crti.o: $(srcdir)/config/avr32/crti.asm $(GCC_PASSES)
  21309. + $(GCC_FOR_TARGET) $(CRTSTUFF_CFLAGS) $(CRTSTUFF_T_CFLAGS) $(INCLUDES) \
  21310. + -c -o $(T)crti.o -x assembler-with-cpp $(srcdir)/config/avr32/crti.asm
  21311. +
  21312. +$(T)crtn.o: $(srcdir)/config/avr32/crtn.asm $(GCC_PASSES)
  21313. + $(GCC_FOR_TARGET) $(CRTSTUFF_CFLAGS) $(CRTSTUFF_T_CFLAGS) $(INCLUDES) \
  21314. + -c -o $(T)crtn.o -x assembler-with-cpp $(srcdir)/config/avr32/crtn.asm
  21315. +
  21316. +
  21317. +# Build the libraries for both hard and soft floating point
  21318. +EXTRA_MULTILIB_PARTS = crtbegin.o crtbeginS.o crtend.o crtendS.o crti.o crtn.o
  21319. +
  21320. +LIBGCC = stmp-multilib
  21321. +INSTALL_LIBGCC = install-multilib
  21322. diff -Nur gcc-4.4.6.orig/gcc/config/avr32/uc3fpu.md gcc-4.4.6/gcc/config/avr32/uc3fpu.md
  21323. --- gcc-4.4.6.orig/gcc/config/avr32/uc3fpu.md 1970-01-01 01:00:00.000000000 +0100
  21324. +++ gcc-4.4.6/gcc/config/avr32/uc3fpu.md 2011-10-22 19:23:08.528581303 +0200
  21325. @@ -0,0 +1,199 @@
  21326. +;; AVR32 machine description file for Floating-Point instructions.
  21327. +;; Copyright 2003-2006 Atmel Corporation.
  21328. +;;
  21329. +;;
  21330. +;; This file is part of GCC.
  21331. +;;
  21332. +;; This program is free software; you can redistribute it and/or modify
  21333. +;; it under the terms of the GNU General Public License as published by
  21334. +;; the Free Software Foundation; either version 2 of the License, or
  21335. +;; (at your option) any later version.
  21336. +;;
  21337. +;; This program is distributed in the hope that it will be useful,
  21338. +;; but WITHOUT ANY WARRANTY; without even the implied warranty of
  21339. +;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  21340. +;; GNU General Public License for more details.
  21341. +;;
  21342. +;; You should have received a copy of the GNU General Public License
  21343. +;; along with this program; if not, write to the Free Software
  21344. +;; Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
  21345. +
  21346. +(define_insn "*movsf_uc3fp"
  21347. + [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,r,m")
  21348. + (match_operand:SF 1 "general_operand" "r,G,m,r"))]
  21349. + "TARGET_ARCH_FPU && TARGET_HARD_FLOAT"
  21350. + "@
  21351. + mov\t%0, %1
  21352. + mov\t%0, %1
  21353. + ld.w\t%0, %1
  21354. + st.w\t%0, %1"
  21355. + [(set_attr "length" "2,4,4,4")
  21356. + (set_attr "type" "alu,alu,load,store")])
  21357. +
  21358. +(define_insn "mulsf3"
  21359. + [(set (match_operand:SF 0 "register_operand" "=r")
  21360. + (mult:SF (match_operand:SF 1 "register_operand" "r")
  21361. + (match_operand:SF 2 "register_operand" "r")))]
  21362. + "TARGET_ARCH_FPU && TARGET_HARD_FLOAT"
  21363. + "fmul.s\t%0, %1, %2"
  21364. + [(set_attr "length" "4")
  21365. + (set_attr "type" "fmul")])
  21366. +
  21367. +(define_insn "nmulsf3"
  21368. + [(set (match_operand:SF 0 "register_operand" "=r")
  21369. + (neg:SF (mult:SF (match_operand:SF 1 "register_operand" "%r")
  21370. + (match_operand:SF 2 "register_operand" "r"))))]
  21371. + "TARGET_ARCH_FPU && TARGET_HARD_FLOAT"
  21372. + "fnmul.s\t%0, %1, %2"
  21373. + [(set_attr "length" "4")
  21374. + (set_attr "type" "fmul")])
  21375. +
  21376. +(define_insn "macsf3"
  21377. + [(set (match_operand:SF 0 "register_operand" "=r")
  21378. + (plus:SF (mult:SF (match_operand:SF 1 "register_operand" "r")
  21379. + (match_operand:SF 2 "register_operand" "r"))
  21380. + (match_operand:SF 3 "register_operand" "r")))]
  21381. + "TARGET_ARCH_FPU && TARGET_HARD_FLOAT"
  21382. + "fmac.s\t%0, %3, %1, %2"
  21383. + [(set_attr "length" "4")
  21384. + (set_attr "type" "fmul")])
  21385. +
  21386. +;(define_insn "nmacsf3"
  21387. +; [(set (match_operand:SF 0 "register_operand" "=r")
  21388. +; (plus:SF (neg:SF (match_operand:SF 1 "register_operand" "r"))
  21389. +; (mult:SF(match_operand:SF 2 "register_operand" "r")
  21390. +; (match_operand:SF 3 "register_operand" "r"))))]
  21391. +; "TARGET_ARCH_FPU && TARGET_HARD_FLOAT"
  21392. +; "fnmac.s\t%0, %1, %2, %3"
  21393. +; [(set_attr "length" "4")
  21394. +; (set_attr "type" "fmul")])
  21395. +
  21396. +(define_insn "nmacsf3"
  21397. + [(set (match_operand:SF 0 "register_operand" "=r")
  21398. + (minus:SF (mult:SF (match_operand:SF 2 "register_operand" "r")
  21399. + (match_operand:SF 3 "register_operand" "r"))
  21400. + (match_operand:SF 1 "register_operand" "r")))]
  21401. + "TARGET_ARCH_FPU && TARGET_HARD_FLOAT"
  21402. + "fnmac.s\t%0, %1, %2, %3"
  21403. + [(set_attr "length" "4")
  21404. + (set_attr "type" "fmul")])
  21405. +
  21406. +(define_insn "msubacsf3"
  21407. + [(set (match_operand:SF 0 "register_operand" "=r")
  21408. + (minus:SF (match_operand:SF 3 "register_operand" "r")
  21409. + (mult:SF (match_operand:SF 1 "register_operand" "r")
  21410. + (match_operand:SF 2 "register_operand" "r"))))]
  21411. + "TARGET_ARCH_FPU && TARGET_HARD_FLOAT"
  21412. + "fmsc.s\t%0, %3, %1, %2"
  21413. + [(set_attr "length" "4")
  21414. + (set_attr "type" "fmul")])
  21415. +
  21416. +(define_insn "nmsubacsf3"
  21417. + [(set (match_operand:SF 0 "register_operand" "=r")
  21418. + (minus:SF (neg:SF (mult:SF (match_operand:SF 1 "register_operand" "r")
  21419. + (match_operand:SF 2 "register_operand" "r")))
  21420. + (match_operand:SF 3 "register_operand" "r")))]
  21421. + "TARGET_ARCH_FPU && TARGET_HARD_FLOAT"
  21422. + "fnmsc.s\t%0, %3, %1, %2"
  21423. + [(set_attr "length" "4")
  21424. + (set_attr "type" "fmul")])
  21425. +
  21426. +(define_insn "addsf3"
  21427. + [(set (match_operand:SF 0 "register_operand" "=r")
  21428. + (plus:SF (match_operand:SF 1 "register_operand" "%r")
  21429. + (match_operand:SF 2 "register_operand" "r")))]
  21430. + "TARGET_ARCH_FPU && TARGET_HARD_FLOAT"
  21431. + "fadd.s\t%0, %1, %2"
  21432. + [(set_attr "length" "4")
  21433. + (set_attr "type" "fmul")])
  21434. +
  21435. +(define_insn "subsf3"
  21436. + [(set (match_operand:SF 0 "register_operand" "=r")
  21437. + (minus:SF (match_operand:SF 1 "register_operand" "r")
  21438. + (match_operand:SF 2 "register_operand" "r")))]
  21439. + "TARGET_ARCH_FPU && TARGET_HARD_FLOAT"
  21440. + "fsub.s\t%0, %1, %2"
  21441. + [(set_attr "length" "4")
  21442. + (set_attr "type" "fmul")])
  21443. +
  21444. +(define_insn "fixuns_truncsfsi2"
  21445. + [(set (match_operand:SI 0 "register_operand" "=r")
  21446. + (unsigned_fix:SI (match_operand:SF 1 "register_operand" "r")))]
  21447. + "TARGET_ARCH_FPU && TARGET_HARD_FLOAT"
  21448. + "fcastrs.uw\t%0, %1"
  21449. + [(set_attr "length" "4")])
  21450. +
  21451. +(define_insn "fix_truncsfsi2"
  21452. + [(set (match_operand:SI 0 "register_operand" "=r")
  21453. + (fix:SI (match_operand:SF 1 "register_operand" "r")))]
  21454. + "TARGET_ARCH_FPU && TARGET_HARD_FLOAT"
  21455. + "fcastrs.sw\t%0, %1"
  21456. + [(set_attr "length" "4")])
  21457. +
  21458. +(define_insn "floatunssisf2"
  21459. + [(set (match_operand:SF 0 "register_operand" "=r")
  21460. + (unsigned_float:SF (match_operand:SI 1 "register_operand" "r")))]
  21461. + "TARGET_ARCH_FPU && TARGET_HARD_FLOAT"
  21462. + "fcastuw.s\t%0, %1"
  21463. + [(set_attr "length" "4")])
  21464. +
  21465. +(define_insn "floatsisf2"
  21466. + [(set (match_operand:SF 0 "register_operand" "=r")
  21467. + (float:SF (match_operand:SI 1 "register_operand" "r")))]
  21468. + "TARGET_ARCH_FPU && TARGET_HARD_FLOAT"
  21469. + "fcastsw.s\t%0, %1"
  21470. + [(set_attr "length" "4")])
  21471. +
  21472. +(define_insn "cmpsf_internal_uc3fp"
  21473. + [(set (cc0)
  21474. + (compare:CC
  21475. + (match_operand:SF 0 "register_operand" "r")
  21476. + (match_operand:SF 1 "register_operand" "r")))]
  21477. + "TARGET_ARCH_FPU && TARGET_HARD_FLOAT"
  21478. + {
  21479. + avr32_branch_type = CMP_SF;
  21480. + if (!rtx_equal_p(cc_prev_status.mdep.value, SET_SRC(PATTERN (insn))) )
  21481. + return "fcmp.s\t%0, %1";
  21482. + return "";
  21483. + }
  21484. + [(set_attr "length" "4")
  21485. + (set_attr "cc" "compare")])
  21486. +
  21487. +(define_expand "divsf3"
  21488. + [(set (match_operand:SF 0 "register_operand" "=r")
  21489. + (div:SF (match_operand:SF 1 "register_operand" "r")
  21490. + (match_operand:SF 2 "register_operand" "r")))]
  21491. + "TARGET_ARCH_FPU && TARGET_HARD_FLOAT && flag_unsafe_math_optimizations"
  21492. + "{
  21493. + emit_insn(gen_frcpa_internal(operands[0],operands[2]));
  21494. + emit_insn(gen_mulsf3(operands[0],operands[0],operands[1]));
  21495. + DONE;
  21496. + }"
  21497. +)
  21498. +
  21499. +(define_insn "frcpa_internal"
  21500. + [(set (match_operand:SF 0 "register_operand" "=r")
  21501. + (unspec:SF [(match_operand:SF 1 "register_operand" "r")] UNSPEC_FRCPA))]
  21502. + "TARGET_ARCH_FPU && TARGET_HARD_FLOAT"
  21503. + "frcpa.s %0,%1"
  21504. + [(set_attr "length" "4")])
  21505. +
  21506. +(define_expand "sqrtsf2"
  21507. + [(set (match_operand:SF 0 "register_operand" "")
  21508. + (sqrt:SF (match_operand:SF 1 "register_operand" "")))]
  21509. + "TARGET_ARCH_FPU && TARGET_HARD_FLOAT && flag_unsafe_math_optimizations"
  21510. + "
  21511. +{
  21512. + rtx scratch = gen_reg_rtx (SFmode);
  21513. + emit_insn (gen_rsqrtsf2 (scratch, operands[1], CONST1_RTX (SFmode)));
  21514. + emit_insn (gen_divsf3(operands[0], force_reg (SFmode, CONST1_RTX (SFmode)),
  21515. + scratch));
  21516. + DONE;
  21517. +}")
  21518. +
  21519. +(define_insn "rsqrtsf2"
  21520. + [(set (match_operand:SF 0 "register_operand" "=r")
  21521. + (div:SF (match_operand:SF 2 "const_1f_operand" "F")
  21522. + (sqrt:SF (match_operand:SF 1 "register_operand" "?r"))))]
  21523. + "TARGET_ARCH_FPU && TARGET_HARD_FLOAT"
  21524. + "frsqrta.s %1, %0")
  21525. diff -Nur gcc-4.4.6.orig/gcc/config/avr32/uclinux-elf.h gcc-4.4.6/gcc/config/avr32/uclinux-elf.h
  21526. --- gcc-4.4.6.orig/gcc/config/avr32/uclinux-elf.h 1970-01-01 01:00:00.000000000 +0100
  21527. +++ gcc-4.4.6/gcc/config/avr32/uclinux-elf.h 2011-10-22 19:23:08.528581303 +0200
  21528. @@ -0,0 +1,20 @@
  21529. +
  21530. +/* Run-time Target Specification. */
  21531. +#undef TARGET_VERSION
  21532. +#define TARGET_VERSION fputs (" (AVR32 uClinux with ELF)", stderr)
  21533. +
  21534. +/* We don't want a .jcr section on uClinux. As if this makes a difference... */
  21535. +#define TARGET_USE_JCR_SECTION 0
  21536. +
  21537. +/* Here we go. Drop the crtbegin/crtend stuff completely. */
  21538. +#undef STARTFILE_SPEC
  21539. +#define STARTFILE_SPEC \
  21540. + "%{!shared: %{pg:gcrt1.o%s} %{!pg:%{p:gcrt1.o%s}" \
  21541. + " %{!p:%{profile:gcrt1.o%s}" \
  21542. + " %{!profile:crt1.o%s}}}} crti.o%s"
  21543. +
  21544. +#undef ENDFILE_SPEC
  21545. +#define ENDFILE_SPEC "crtn.o%s"
  21546. +
  21547. +#undef TARGET_DEFAULT
  21548. +#define TARGET_DEFAULT (AVR32_FLAG_NO_INIT_GOT)
  21549. diff -Nur gcc-4.4.6.orig/gcc/config/host-linux.c gcc-4.4.6/gcc/config/host-linux.c
  21550. --- gcc-4.4.6.orig/gcc/config/host-linux.c 2009-02-20 16:20:38.000000000 +0100
  21551. +++ gcc-4.4.6/gcc/config/host-linux.c 2011-10-22 19:23:08.528581303 +0200
  21552. @@ -25,6 +25,9 @@
  21553. #include "hosthooks.h"
  21554. #include "hosthooks-def.h"
  21555. +#ifndef SSIZE_MAX
  21556. +#define SSIZE_MAX LONG_MAX
  21557. +#endif
  21558. /* Linux has a feature called exec-shield-randomize that perturbs the
  21559. address of non-fixed mapped segments by a (relatively) small amount.
  21560. diff -Nur gcc-4.4.6.orig/gcc/config.gcc gcc-4.4.6/gcc/config.gcc
  21561. --- gcc-4.4.6.orig/gcc/config.gcc 2011-02-18 22:39:51.000000000 +0100
  21562. +++ gcc-4.4.6/gcc/config.gcc 2011-10-22 19:23:08.528581303 +0200
  21563. @@ -810,6 +810,24 @@
  21564. avr-*-*)
  21565. tm_file="avr/avr.h dbxelf.h"
  21566. ;;
  21567. +avr32*-*-linux*)
  21568. + tm_file="dbxelf.h elfos.h linux.h avr32/linux-elf.h avr32/avr32.h "
  21569. + tmake_file="t-linux avr32/t-avr32-linux"
  21570. + extra_parts="crtbegin.o crtbeginS.o crtend.o crtendS.o"
  21571. + extra_modes=avr32/avr32-modes.def
  21572. + gnu_ld=yes
  21573. + ;;
  21574. +avr32*-*-uclinux*)
  21575. + tm_file="dbxelf.h elfos.h linux.h avr32/linux-elf.h avr32/uclinux-elf.h avr32/avr32.h"
  21576. + tmake_file="t-linux avr32/t-avr32-linux"
  21577. + extra_modes=avr32/avr32-modes.def
  21578. + gnu_ld=yes
  21579. + ;;
  21580. +avr32-*-*)
  21581. + tm_file="dbxelf.h elfos.h avr32/avr32.h avr32/avr32-elf.h"
  21582. + tmake_file="avr32/t-avr32 avr32/t-elf"
  21583. + extra_modes=avr32/avr32-modes.def
  21584. + ;;
  21585. bfin*-elf*)
  21586. tm_file="${tm_file} dbxelf.h elfos.h bfin/elf.h"
  21587. tmake_file=bfin/t-bfin-elf
  21588. @@ -2736,6 +2754,32 @@
  21589. fi
  21590. ;;
  21591. + avr32*-*-*)
  21592. + supported_defaults="part arch"
  21593. +
  21594. + case "$with_part" in
  21595. + "" \
  21596. + | "ap7000" | "ap7010" | "ap7020" | "uc3a0256" | "uc3a0512" | "uc3a1128" | "uc3a1256" | "uc3a1512" )
  21597. + # OK
  21598. + ;;
  21599. + *)
  21600. + echo "Unknown part used in --with-part=$with_part" 1>&2
  21601. + exit 1
  21602. + ;;
  21603. + esac
  21604. +
  21605. + case "$with_arch" in
  21606. + "" \
  21607. + | "ap" | "uc")
  21608. + # OK
  21609. + ;;
  21610. + *)
  21611. + echo "Unknown arch used in --with-arch=$with_arch" 1>&2
  21612. + exit 1
  21613. + ;;
  21614. + esac
  21615. + ;;
  21616. +
  21617. fr*-*-*linux*)
  21618. supported_defaults=cpu
  21619. case "$with_cpu" in
  21620. diff -Nur gcc-4.4.6.orig/gcc/config.gcc.orig gcc-4.4.6/gcc/config.gcc.orig
  21621. --- gcc-4.4.6.orig/gcc/config.gcc.orig 1970-01-01 01:00:00.000000000 +0100
  21622. +++ gcc-4.4.6/gcc/config.gcc.orig 2011-10-22 19:23:08.528581303 +0200
  21623. @@ -0,0 +1,3208 @@
  21624. +# GCC target-specific configuration file.
  21625. +# Copyright 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007,
  21626. +# 2008, 2009, 2010 Free Software Foundation, Inc.
  21627. +
  21628. +#This file is part of GCC.
  21629. +
  21630. +#GCC is free software; you can redistribute it and/or modify it under
  21631. +#the terms of the GNU General Public License as published by the Free
  21632. +#Software Foundation; either version 3, or (at your option) any later
  21633. +#version.
  21634. +
  21635. +#GCC is distributed in the hope that it will be useful, but WITHOUT
  21636. +#ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
  21637. +#FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
  21638. +#for more details.
  21639. +
  21640. +#You should have received a copy of the GNU General Public License
  21641. +#along with GCC; see the file COPYING3. If not see
  21642. +#<http://www.gnu.org/licenses/>.
  21643. +
  21644. +# This is the GCC target-specific configuration file
  21645. +# where a configuration type is mapped to different system-specific
  21646. +# definitions and files. This is invoked by the autoconf-generated
  21647. +# configure script. Putting it in a separate shell file lets us skip
  21648. +# running autoconf when modifying target-specific information.
  21649. +
  21650. +# When you change the cases in the OS or target switches, consider
  21651. +# updating ../libgcc/config.host also.
  21652. +
  21653. +# This file switches on the shell variable ${target}, and also uses the
  21654. +# following shell variables:
  21655. +#
  21656. +# with_* Various variables as set by configure.
  21657. +#
  21658. +# enable_threads Either the name, yes or no depending on whether
  21659. +# threads support was requested.
  21660. +#
  21661. +# default_use_cxa_atexit
  21662. +# The default value for the $enable___cxa_atexit
  21663. +# variable. enable___cxa_atexit needs to be set to
  21664. +# "yes" for the correct operation of C++ destructors
  21665. +# but it relies upon the presence of a non-standard C
  21666. +# library function called __cxa_atexit.
  21667. +# Since not all C libraries provide __cxa_atexit the
  21668. +# default value of $default_use_cxa_atexit is set to
  21669. +# "no" except for targets which are known to be OK.
  21670. +#
  21671. +# gas_flag Either yes or no depending on whether GNU as was
  21672. +# requested.
  21673. +#
  21674. +# gnu_ld_flag Either yes or no depending on whether GNU ld was
  21675. +# requested.
  21676. +
  21677. +# This file sets the following shell variables for use by the
  21678. +# autoconf-generated configure script:
  21679. +#
  21680. +# cpu_type The name of the cpu, if different from the first
  21681. +# chunk of the canonical target name.
  21682. +#
  21683. +# tm_defines List of target macros to define for all compilations.
  21684. +#
  21685. +# tm_file A list of target macro files, if different from
  21686. +# "$cpu_type/$cpu_type.h". Usually it's constructed
  21687. +# per target in a way like this:
  21688. +# tm_file="${tm_file} dbxelf.h elfos.h svr4.h ${cpu_type.h}/elf.h"
  21689. +# Note that the preferred order is:
  21690. +# - specific target header "${cpu_type}/${cpu_type.h}"
  21691. +# - generic headers like dbxelf.h elfos.h, etc.
  21692. +# - specializing target headers like ${cpu_type.h}/elf.h
  21693. +# This helps to keep OS specific stuff out of the CPU
  21694. +# defining header ${cpu_type}/${cpu_type.h}.
  21695. +#
  21696. +# It is possible to include automatically-generated
  21697. +# build-directory files by prefixing them with "./".
  21698. +# All other files should relative to $srcdir/config.
  21699. +#
  21700. +# tm_p_file Location of file with declarations for functions
  21701. +# in $out_file.
  21702. +#
  21703. +# out_file The name of the machine description C support
  21704. +# file, if different from "$cpu_type/$cpu_type.c".
  21705. +#
  21706. +# md_file The name of the machine-description file, if
  21707. +# different from "$cpu_type/$cpu_type.md".
  21708. +#
  21709. +# tmake_file A list of machine-description-specific
  21710. +# makefile-fragments, if different from
  21711. +# "$cpu_type/t-$cpu_type".
  21712. +#
  21713. +# extra_modes The name of the file containing a list of extra
  21714. +# machine modes, if necessary and different from
  21715. +# "$cpu_type/$cpu_type-modes.def".
  21716. +#
  21717. +# extra_objs List of extra objects that should be linked into
  21718. +# the compiler proper (cc1, cc1obj, cc1plus)
  21719. +# depending on target.
  21720. +#
  21721. +# extra_gcc_objs List of extra objects that should be linked into
  21722. +# the compiler driver (gcc) depending on target.
  21723. +#
  21724. +# extra_headers List of used header files from the directory
  21725. +# config/${cpu_type}.
  21726. +#
  21727. +# use_gcc_tgmath If set, add tgmath.h to the list of used header
  21728. +# files.
  21729. +#
  21730. +# extra_passes List of extra executables compiled for this target
  21731. +# machine, used for compiling from source to object.
  21732. +#
  21733. +# extra_parts List of extra object files that should be compiled
  21734. +# for this target machine.
  21735. +#
  21736. +# extra_programs Like extra_passes, but these are used when linking.
  21737. +#
  21738. +# extra_options List of target-dependent .opt files.
  21739. +#
  21740. +# c_target_objs List of extra target-dependent objects that be
  21741. +# linked into the C compiler only.
  21742. +#
  21743. +# cxx_target_objs List of extra target-dependent objects that be
  21744. +# linked into the C++ compiler only.
  21745. +#
  21746. +# fortran_target_objs List of extra target-dependent objects that be
  21747. +# linked into the fortran compiler only.
  21748. +#
  21749. +# target_gtfiles List of extra source files with type information.
  21750. +#
  21751. +# xm_defines List of macros to define when compiling for the
  21752. +# target machine.
  21753. +#
  21754. +# xm_file List of files to include when compiling for the
  21755. +# target machine.
  21756. +#
  21757. +# use_collect2 Set to yes or no, depending on whether collect2
  21758. +# will be used.
  21759. +#
  21760. +# target_cpu_default Set to override the default target model.
  21761. +#
  21762. +# gdb_needs_out_file_path
  21763. +# Set to yes if gdb needs a dir command with
  21764. +# `dirname $out_file`.
  21765. +#
  21766. +# thread_file Set to control which thread package to use.
  21767. +#
  21768. +# gas Set to yes or no depending on whether the target
  21769. +# system normally uses GNU as.
  21770. +#
  21771. +# need_64bit_hwint Set to yes if HOST_WIDE_INT must be 64 bits wide
  21772. +# for this target. This is true if this target
  21773. +# supports "long" or "wchar_t" wider than 32 bits,
  21774. +# or BITS_PER_WORD is wider than 32 bits.
  21775. +# The setting made here must match the one made in
  21776. +# other locations such as libcpp/configure.ac
  21777. +#
  21778. +# configure_default_options
  21779. +# Set to an initializer for configure_default_options
  21780. +# in configargs.h, based on --with-cpu et cetera.
  21781. +#
  21782. +# use_fixproto Set to "yes" if fixproto should be run normally,
  21783. +# "no" if fixproto should never be run.
  21784. +
  21785. +# The following variables are used in each case-construct to build up the
  21786. +# outgoing variables:
  21787. +#
  21788. +# gnu_ld Set to yes or no depending on whether the target
  21789. +# system normally uses GNU ld.
  21790. +
  21791. +out_file=
  21792. +tmake_file=
  21793. +extra_headers=
  21794. +use_gcc_tgmath=yes
  21795. +extra_passes=
  21796. +extra_parts=
  21797. +extra_programs=
  21798. +extra_objs=
  21799. +extra_gcc_objs=
  21800. +extra_options=
  21801. +c_target_objs=
  21802. +cxx_target_objs=
  21803. +fortran_target_objs=
  21804. +tm_defines=
  21805. +xm_defines=
  21806. +# Set this to force installation and use of collect2.
  21807. +use_collect2=
  21808. +# Set this to override the default target model.
  21809. +target_cpu_default=
  21810. +# Set this if gdb needs a dir command with `dirname $out_file`
  21811. +gdb_needs_out_file_path=
  21812. +# Set this to control which thread package will be used.
  21813. +thread_file=
  21814. +# Reinitialize these from the flag values every loop pass, since some
  21815. +# configure entries modify them.
  21816. +gas="$gas_flag"
  21817. +gnu_ld="$gnu_ld_flag"
  21818. +default_use_cxa_atexit=no
  21819. +target_gtfiles=
  21820. +need_64bit_hwint=
  21821. +
  21822. +# Default to not using fixproto. Targets which need fixproto should
  21823. +# specifically set this to 'yes'.
  21824. +use_fixproto=no
  21825. +
  21826. +# Don't carry these over build->host->target. Please.
  21827. +xm_file=
  21828. +md_file=
  21829. +
  21830. +# Obsolete configurations.
  21831. +case ${target} in
  21832. +# Avoid generic cases below matching.
  21833. + h8300-*-rtems* | h8300-*-elf* \
  21834. + | sh-*-elf* | sh-*-symbianelf* | sh-*-linux* | sh-*-netbsdelf* \
  21835. + | sh-*-rtems* | sh-wrs-vxworks) ;;
  21836. + arm-*-coff* \
  21837. + | armel-*-coff* \
  21838. + | h8300-*-* \
  21839. + | i[34567]86-*-aout* \
  21840. + | i[34567]86-*-coff* \
  21841. + | m68k-*-aout* \
  21842. + | m68k-*-coff* \
  21843. + | sh-*-* \
  21844. + | pdp11-*-bsd \
  21845. + | rs6000-ibm-aix4.[12]* \
  21846. + | powerpc-ibm-aix4.[12]* \
  21847. + )
  21848. + if test "x$enable_obsolete" != xyes; then
  21849. + echo "*** Configuration ${target} is obsolete." >&2
  21850. + echo "*** Specify --enable-obsolete to build it anyway." >&2
  21851. + echo "*** Support will be REMOVED in the next major release of GCC," >&2
  21852. + echo "*** unless a maintainer comes forward." >&2
  21853. + exit 1
  21854. + fi;;
  21855. +esac
  21856. +
  21857. +# Unsupported targets list. Do not put an entry in this list unless
  21858. +# it would otherwise be caught by a more permissive pattern. The list
  21859. +# should be in alphabetical order.
  21860. +case ${target} in
  21861. + i[34567]86-go32-* \
  21862. + | i[34567]86-*-go32* \
  21863. + | mips64orion*-*-rtems* \
  21864. + | sparc-hal-solaris2* \
  21865. + | thumb-*-* \
  21866. + | *-*-linux*aout* \
  21867. + | *-*-linux*coff* \
  21868. + | *-*-linux*libc1* \
  21869. + | *-*-linux*oldld* \
  21870. + | *-*-rtemsaout* \
  21871. + | *-*-rtemscoff* \
  21872. + | *-*-solaris2.[0-6] \
  21873. + | *-*-solaris2.[0-6].* \
  21874. + | *-*-sysv* \
  21875. + | vax-*-vms* \
  21876. + )
  21877. + echo "*** Configuration ${target} not supported" 1>&2
  21878. + exit 1
  21879. + ;;
  21880. +esac
  21881. +
  21882. +# Set default cpu_type, tm_file, tm_p_file and xm_file so it can be
  21883. +# updated in each machine entry. Also set default extra_headers for some
  21884. +# machines.
  21885. +tm_p_file=
  21886. +cpu_type=`echo ${target} | sed 's/-.*$//'`
  21887. +cpu_is_64bit=
  21888. +case ${target} in
  21889. +m32c*-*-*)
  21890. + cpu_type=m32c
  21891. + tmake_file=m32c/t-m32c
  21892. + ;;
  21893. +alpha*-*-*)
  21894. + cpu_type=alpha
  21895. + need_64bit_hwint=yes
  21896. + ;;
  21897. +am33_2.0-*-linux*)
  21898. + cpu_type=mn10300
  21899. + ;;
  21900. +arm*-*-*)
  21901. + cpu_type=arm
  21902. + extra_headers="mmintrin.h arm_neon.h"
  21903. + c_target_objs="arm-c.o"
  21904. + cxx_target_objs="arm-c.o"
  21905. + ;;
  21906. +bfin*-*)
  21907. + cpu_type=bfin
  21908. + ;;
  21909. +crisv32-*)
  21910. + cpu_type=cris
  21911. + ;;
  21912. +frv*) cpu_type=frv
  21913. + ;;
  21914. +fido-*-*)
  21915. + cpu_type=m68k
  21916. + extra_headers=math-68881.h
  21917. + ;;
  21918. +i[34567]86-*-*)
  21919. + cpu_type=i386
  21920. + c_target_objs="i386-c.o"
  21921. + cxx_target_objs="i386-c.o"
  21922. + extra_headers="cpuid.h mmintrin.h mm3dnow.h xmmintrin.h emmintrin.h
  21923. + pmmintrin.h tmmintrin.h ammintrin.h smmintrin.h
  21924. + nmmintrin.h bmmintrin.h mmintrin-common.h
  21925. + wmmintrin.h immintrin.h x86intrin.h avxintrin.h
  21926. + cross-stdarg.h"
  21927. + ;;
  21928. +x86_64-*-*)
  21929. + cpu_type=i386
  21930. + c_target_objs="i386-c.o"
  21931. + cxx_target_objs="i386-c.o"
  21932. + extra_headers="cpuid.h mmintrin.h mm3dnow.h xmmintrin.h emmintrin.h
  21933. + pmmintrin.h tmmintrin.h ammintrin.h smmintrin.h
  21934. + nmmintrin.h bmmintrin.h mmintrin-common.h
  21935. + wmmintrin.h immintrin.h x86intrin.h avxintrin.h
  21936. + cross-stdarg.h"
  21937. + need_64bit_hwint=yes
  21938. + ;;
  21939. +ia64-*-*)
  21940. + extra_headers=ia64intrin.h
  21941. + need_64bit_hwint=yes
  21942. + ;;
  21943. +hppa*-*-*)
  21944. + cpu_type=pa
  21945. + ;;
  21946. +m32r*-*-*)
  21947. + cpu_type=m32r
  21948. + ;;
  21949. +m68k-*-*)
  21950. + extra_headers=math-68881.h
  21951. + ;;
  21952. +mips*-*-*)
  21953. + cpu_type=mips
  21954. + need_64bit_hwint=yes
  21955. + extra_headers="loongson.h"
  21956. + ;;
  21957. +picochip-*-*)
  21958. + cpu_type=picochip
  21959. + ;;
  21960. +powerpc*-*-*)
  21961. + cpu_type=rs6000
  21962. + extra_headers="ppc-asm.h altivec.h spe.h ppu_intrinsics.h paired.h spu2vmx.h vec_types.h si2vmx.h"
  21963. + need_64bit_hwint=yes
  21964. + case x$with_cpu in
  21965. + xpowerpc64|xdefault64|x6[23]0|x970|xG5|xpower[34567]|xpower6x|xrs64a|xcell)
  21966. + cpu_is_64bit=yes
  21967. + ;;
  21968. + esac
  21969. + ;;
  21970. +rs6000*-*-*)
  21971. + need_64bit_hwint=yes
  21972. + ;;
  21973. +score*-*-*)
  21974. + cpu_type=score
  21975. + ;;
  21976. +sparc*-*-*)
  21977. + cpu_type=sparc
  21978. + need_64bit_hwint=yes
  21979. + ;;
  21980. +spu*-*-*)
  21981. + cpu_type=spu
  21982. + need_64bit_hwint=yes
  21983. + ;;
  21984. +s390*-*-*)
  21985. + cpu_type=s390
  21986. + need_64bit_hwint=yes
  21987. + ;;
  21988. +# Note the 'l'; we need to be able to match e.g. "shle" or "shl".
  21989. +sh[123456789lbe]*-*-* | sh-*-*)
  21990. + cpu_type=sh
  21991. + need_64bit_hwint=yes
  21992. + ;;
  21993. +esac
  21994. +
  21995. +tm_file=${cpu_type}/${cpu_type}.h
  21996. +if test -f ${srcdir}/config/${cpu_type}/${cpu_type}-protos.h
  21997. +then
  21998. + tm_p_file=${cpu_type}/${cpu_type}-protos.h
  21999. +fi
  22000. +extra_modes=
  22001. +if test -f ${srcdir}/config/${cpu_type}/${cpu_type}-modes.def
  22002. +then
  22003. + extra_modes=${cpu_type}/${cpu_type}-modes.def
  22004. +fi
  22005. +if test -f ${srcdir}/config/${cpu_type}/${cpu_type}.opt
  22006. +then
  22007. + extra_options="${extra_options} ${cpu_type}/${cpu_type}.opt"
  22008. +fi
  22009. +
  22010. +case ${target} in
  22011. +i[34567]86-*-*)
  22012. + if test "x$enable_cld" = xyes; then
  22013. + tm_defines="${tm_defines} USE_IX86_CLD=1"
  22014. + fi
  22015. + ;;
  22016. +x86_64-*-*)
  22017. + tm_file="i386/biarch64.h ${tm_file}"
  22018. + if test "x$enable_cld" = xyes; then
  22019. + tm_defines="${tm_defines} USE_IX86_CLD=1"
  22020. + fi
  22021. + ;;
  22022. +esac
  22023. +
  22024. +# On a.out targets, we need to use collect2.
  22025. +case ${target} in
  22026. +*-*-*aout*)
  22027. + use_collect2=yes
  22028. + ;;
  22029. +esac
  22030. +
  22031. +# Common parts for widely ported systems.
  22032. +case ${target} in
  22033. +*-*-darwin*)
  22034. + tm_file="${tm_file} darwin.h"
  22035. + case ${target} in
  22036. + *-*-darwin[912]*)
  22037. + tm_file="${tm_file} darwin9.h"
  22038. + ;;
  22039. + esac
  22040. + tm_file="${tm_file} ${cpu_type}/darwin.h"
  22041. + tm_p_file="${tm_p_file} darwin-protos.h"
  22042. + tmake_file="t-darwin ${cpu_type}/t-darwin t-slibgcc-darwin"
  22043. + target_gtfiles="\$(srcdir)/config/darwin.c"
  22044. + extra_options="${extra_options} darwin.opt"
  22045. + c_target_objs="${c_target_objs} darwin-c.o"
  22046. + cxx_target_objs="${cxx_target_objs} darwin-c.o"
  22047. + fortran_target_objs="darwin-f.o"
  22048. + extra_objs="darwin.o"
  22049. + extra_gcc_objs="darwin-driver.o"
  22050. + default_use_cxa_atexit=yes
  22051. + case ${enable_threads} in
  22052. + "" | yes | posix) thread_file='posix' ;;
  22053. + esac
  22054. + ;;
  22055. +*-*-freebsd[12] | *-*-freebsd[12].* | *-*-freebsd*aout*)
  22056. + # This is the place-holder for the generic a.out configuration
  22057. + # of FreeBSD. No actual configuration resides here since
  22058. + # there was only ever a bare-bones ix86 configuration for
  22059. + # a.out and it exists solely in the machine-specific section.
  22060. + # This place-holder must exist to avoid dropping into
  22061. + # the generic ELF configuration of FreeBSD (i.e. it must be
  22062. + # ordered before that section).
  22063. + ;;
  22064. +*-*-freebsd*)
  22065. + # This is the generic ELF configuration of FreeBSD. Later
  22066. + # machine-specific sections may refine and add to this
  22067. + # configuration.
  22068. + #
  22069. + # Due to tm_file entry ordering issues that vary between cpu
  22070. + # architectures, we only define fbsd_tm_file to allow the
  22071. + # machine-specific section to dictate the final order of all
  22072. + # entries of tm_file with the minor exception that components
  22073. + # of the tm_file set here will always be of the form:
  22074. + #
  22075. + # freebsd<version_number>.h [freebsd-<conf_option>.h ...] freebsd-spec.h freebsd.h
  22076. + #
  22077. + # The machine-specific section should not tamper with this
  22078. + # ordering but may order all other entries of tm_file as it
  22079. + # pleases around the provided core setting.
  22080. + gas=yes
  22081. + gnu_ld=yes
  22082. + extra_parts="crtbegin.o crtend.o crtbeginS.o crtendS.o"
  22083. + fbsd_major=`echo ${target} | sed -e 's/.*freebsd//g' | sed -e 's/\..*//g'`
  22084. + tm_defines="${tm_defines} FBSD_MAJOR=${fbsd_major}"
  22085. + tmake_file="t-slibgcc-elf-ver t-freebsd"
  22086. + case ${enable_threads} in
  22087. + no)
  22088. + fbsd_tm_file="${fbsd_tm_file} freebsd-nthr.h"
  22089. + ;;
  22090. + "" | yes | posix)
  22091. + thread_file='posix'
  22092. + tmake_file="${tmake_file} t-freebsd-thread"
  22093. + # Before 5.0, FreeBSD can't bind shared libraries to -lc
  22094. + # when "optionally" threaded via weak pthread_* checks.
  22095. + case ${target} in
  22096. + *-*-freebsd[34] | *-*-freebsd[34].*)
  22097. + tmake_file="${tmake_file} t-slibgcc-nolc-override"
  22098. + ;;
  22099. + esac
  22100. + ;;
  22101. + *)
  22102. + echo 'Unknown thread configuration for FreeBSD'
  22103. + exit 1
  22104. + ;;
  22105. + esac
  22106. + fbsd_tm_file="${fbsd_tm_file} freebsd-spec.h freebsd.h"
  22107. + case ${target} in
  22108. + *-*-freebsd[345].*)
  22109. + :;;
  22110. + *)
  22111. + default_use_cxa_atexit=yes;;
  22112. + esac
  22113. + ;;
  22114. +*-*-linux* | frv-*-*linux* | *-*-kfreebsd*-gnu | *-*-knetbsd*-gnu | *-*-gnu* | *-*-kopensolaris*-gnu)
  22115. + extra_parts="crtbegin.o crtbeginS.o crtbeginT.o crtend.o crtendS.o"
  22116. + gas=yes
  22117. + gnu_ld=yes
  22118. + case ${enable_threads} in
  22119. + "" | yes | posix) thread_file='posix' ;;
  22120. + esac
  22121. + tmake_file="t-slibgcc-elf-ver t-linux"
  22122. + case $target in
  22123. + *-*-linux* | frv-*-*linux* | *-*-kfreebsd*-gnu | *-*-knetbsd*-gnu | *-*-kopensolaris*-gnu)
  22124. + :;;
  22125. + *-*-gnu*)
  22126. + tmake_file="$tmake_file t-gnu";;
  22127. + esac
  22128. + # glibc / uclibc switch. uclibc isn't usable for GNU/Hurd and neither for
  22129. + # GNU/k*BSD.
  22130. + case $target in
  22131. + *linux*)
  22132. + extra_options="$extra_options linux.opt";;
  22133. + *)
  22134. + tm_defines="$tm_defines OPTION_GLIBC=1";;
  22135. + esac
  22136. + case ${target} in
  22137. + *-*-*uclibc*)
  22138. + tm_defines="${tm_defines} UCLIBC_DEFAULT=1"
  22139. + ;;
  22140. + *)
  22141. + tm_defines="${tm_defines} UCLIBC_DEFAULT=0"
  22142. + ;;
  22143. + esac
  22144. + # Assume that glibc or uClibc are being used and so __cxa_atexit is provided.
  22145. + default_use_cxa_atexit=yes
  22146. + use_gcc_tgmath=no
  22147. + ;;
  22148. +*-*-netbsd*)
  22149. + tmake_file="t-slibgcc-elf-ver t-libc-ok t-netbsd t-libgcc-pic"
  22150. + gas=yes
  22151. + gnu_ld=yes
  22152. +
  22153. + # NetBSD 2.0 and later get POSIX threads enabled by default.
  22154. + # Allow them to be explicitly enabled on any other version.
  22155. + case ${enable_threads} in
  22156. + "")
  22157. + case ${target} in
  22158. + *-*-netbsd[2-9]* | *-*-netbsdelf[2-9]*)
  22159. + thread_file='posix'
  22160. + tm_defines="${tm_defines} NETBSD_ENABLE_PTHREADS"
  22161. + ;;
  22162. + esac
  22163. + ;;
  22164. + yes | posix)
  22165. + thread_file='posix'
  22166. + tm_defines="${tm_defines} NETBSD_ENABLE_PTHREADS"
  22167. + ;;
  22168. + esac
  22169. +
  22170. + # NetBSD 1.7 and later are set up to use GCC's crtstuff for
  22171. + # ELF configurations. We will clear extra_parts in the
  22172. + # a.out configurations.
  22173. + case ${target} in
  22174. + *-*-netbsd*1.[7-9]* | *-*-netbsd[2-9]* | *-*-netbsdelf[2-9]*)
  22175. + extra_parts="crtbegin.o crtend.o crtbeginS.o crtendS.o crtbeginT.o"
  22176. + ;;
  22177. + esac
  22178. +
  22179. + # NetBSD 2.0 and later provide __cxa_atexit(), which we use by
  22180. + # default (unless overridden by --disable-__cxa_atexit).
  22181. + case ${target} in
  22182. + *-*-netbsd[2-9]* | *-*-netbsdelf[2-9]*)
  22183. + default_use_cxa_atexit=yes
  22184. + ;;
  22185. + esac
  22186. + ;;
  22187. +*-*-openbsd*)
  22188. + tmake_file="t-libc-ok t-openbsd t-libgcc-pic"
  22189. + case ${enable_threads} in
  22190. + yes)
  22191. + thread_file='posix'
  22192. + tmake_file="${tmake_file} t-openbsd-thread"
  22193. + ;;
  22194. + esac
  22195. + case ${target} in
  22196. + *-*-openbsd2.*|*-*-openbsd3.[012])
  22197. + tm_defines="${tm_defines} HAS_LIBC_R=1" ;;
  22198. + esac
  22199. + ;;
  22200. +*-*-rtems*)
  22201. + case ${enable_threads} in
  22202. + yes) thread_file='rtems' ;;
  22203. + esac
  22204. + ;;
  22205. +*-*-vxworks*)
  22206. + tmake_file=t-vxworks
  22207. + xm_defines=POSIX
  22208. + extra_options="${extra_options} vxworks.opt"
  22209. + extra_objs=vxworks.o
  22210. + case ${enable_threads} in
  22211. + no) ;;
  22212. + "" | yes | vxworks) thread_file='vxworks' ;;
  22213. + *) echo 'Unknown thread configuration for VxWorks'; exit 1 ;;
  22214. + esac
  22215. + ;;
  22216. +*-*-elf)
  22217. + # Assume that newlib is being used and so __cxa_atexit is provided.
  22218. + default_use_cxa_atexit=yes
  22219. + ;;
  22220. +esac
  22221. +
  22222. +case ${target} in
  22223. +# Support site-specific machine types.
  22224. +*local*)
  22225. + rest=`echo ${target} | sed -e "s/$cpu_type-//"`
  22226. + tm_file=${cpu_type}/$rest.h
  22227. + if test -f $srcdir/config/${cpu_type}/xm-$rest.h
  22228. + then xm_file=${cpu_type}/xm-$rest.h
  22229. + fi
  22230. + if test -f $srcdir/config/${cpu_type}/t-$rest
  22231. + then tmake_file=${cpu_type}/t-$rest
  22232. + fi
  22233. + ;;
  22234. +alpha*-*-linux*)
  22235. + tm_file="${tm_file} alpha/elf.h alpha/linux.h alpha/linux-elf.h"
  22236. + target_cpu_default="MASK_GAS"
  22237. + tmake_file="${tmake_file} alpha/t-crtfm alpha/t-alpha alpha/t-ieee alpha/t-linux"
  22238. + ;;
  22239. +alpha*-*-gnu*)
  22240. + tm_file="$tm_file alpha/elf.h alpha/linux.h alpha/linux-elf.h gnu.h alpha/gnu.h"
  22241. + target_cpu_default="MASK_GAS"
  22242. + tmake_file="${tmake_file} alpha/t-crtfm alpha/t-alpha alpha/t-ieee"
  22243. + ;;
  22244. +alpha*-*-freebsd*)
  22245. + tm_file="${tm_file} ${fbsd_tm_file} alpha/elf.h alpha/freebsd.h"
  22246. + target_cpu_default="MASK_GAS"
  22247. + tmake_file="${tmake_file} alpha/t-crtfm alpha/t-alpha alpha/t-ieee"
  22248. + extra_parts="crtbegin.o crtend.o crtbeginS.o crtendS.o crtbeginT.o"
  22249. + ;;
  22250. +alpha*-*-netbsd*)
  22251. + tm_file="${tm_file} netbsd.h alpha/elf.h netbsd-elf.h alpha/netbsd.h"
  22252. + target_cpu_default="MASK_GAS"
  22253. + tmake_file="${tmake_file} alpha/t-alpha alpha/t-ieee"
  22254. + ;;
  22255. +alpha*-*-openbsd*)
  22256. + tm_defines="${tm_defines} OBSD_NO_DYNAMIC_LIBRARIES OBSD_HAS_DECLARE_FUNCTION_NAME OBSD_HAS_DECLARE_FUNCTION_SIZE OBSD_HAS_DECLARE_OBJECT"
  22257. + tm_file="alpha/alpha.h openbsd.h alpha/openbsd.h"
  22258. + # default x-alpha is only appropriate for dec-osf.
  22259. + target_cpu_default="MASK_GAS"
  22260. + tmake_file="alpha/t-alpha alpha/t-ieee"
  22261. + ;;
  22262. +alpha*-dec-osf[45]*)
  22263. + if test x$stabs = xyes
  22264. + then
  22265. + tm_file="${tm_file} dbx.h"
  22266. + fi
  22267. + if test x$gas != xyes
  22268. + then
  22269. + extra_passes="mips-tfile mips-tdump"
  22270. + fi
  22271. + use_collect2=yes
  22272. + tmake_file="alpha/t-alpha alpha/t-ieee alpha/t-crtfm alpha/t-osf4"
  22273. + tm_file="${tm_file} alpha/osf.h"
  22274. + extra_headers=va_list.h
  22275. + case ${target} in
  22276. + *-*-osf4*)
  22277. + # Define TARGET_SUPPORT_ARCH except on 4.0a.
  22278. + case ${target} in
  22279. + *-*-osf4.0a) ;;
  22280. + *) tm_defines="${tm_defines} TARGET_SUPPORT_ARCH=1"
  22281. + esac
  22282. + ;;
  22283. + *-*-osf5*)
  22284. + tm_file="${tm_file} alpha/osf5.h"
  22285. + tm_defines="${tm_defines} TARGET_SUPPORT_ARCH=1"
  22286. + ;;
  22287. + esac
  22288. + case ${enable_threads} in
  22289. + "" | yes | posix)
  22290. + thread_file='posix'
  22291. + tmake_file="${tmake_file} alpha/t-osf-pthread"
  22292. + ;;
  22293. + esac
  22294. + ;;
  22295. +alpha64-dec-*vms*)
  22296. + tm_file="${tm_file} alpha/vms.h alpha/vms64.h"
  22297. + xm_file="alpha/xm-vms.h"
  22298. + tmake_file="alpha/t-alpha alpha/t-vms alpha/t-vms64 alpha/t-ieee"
  22299. + prefix=/gnu
  22300. + local_prefix=/gnu
  22301. + ;;
  22302. +alpha*-dec-*vms*)
  22303. + tm_file="${tm_file} alpha/vms.h"
  22304. + xm_file=alpha/xm-vms.h
  22305. + tmake_file="alpha/t-alpha alpha/t-vms alpha/t-ieee"
  22306. + prefix=/gnu
  22307. + local_prefix=/gnu
  22308. + ;;
  22309. +arc-*-elf*)
  22310. + tm_file="dbxelf.h elfos.h svr4.h ${tm_file}"
  22311. + extra_parts="crtinit.o crtfini.o"
  22312. + ;;
  22313. +arm-*-coff* | armel-*-coff*)
  22314. + tm_file="arm/semi.h arm/aout.h arm/arm.h arm/coff.h dbxcoff.h"
  22315. + tmake_file="arm/t-arm arm/t-arm-coff"
  22316. + ;;
  22317. +arm-wrs-vxworks)
  22318. + tm_file="elfos.h arm/elf.h arm/aout.h ${tm_file} vx-common.h vxworks.h arm/vxworks.h"
  22319. + tmake_file="${tmake_file} arm/t-arm arm/t-vxworks"
  22320. + ;;
  22321. +arm*-*-freebsd*)
  22322. + tm_file="dbxelf.h elfos.h ${fbsd_tm_file} arm/elf.h arm/aout.h arm/freebsd.h arm/arm.h"
  22323. + tmake_file="${tmake_file} arm/t-arm arm/t-strongarm-elf"
  22324. + ;;
  22325. +arm*-*-netbsdelf*)
  22326. + tm_file="dbxelf.h elfos.h netbsd.h netbsd-elf.h arm/elf.h arm/aout.h arm/arm.h arm/netbsd-elf.h"
  22327. + tmake_file="${tmake_file} arm/t-arm arm/t-netbsd"
  22328. + ;;
  22329. +arm*-*-netbsd*)
  22330. + tm_file="arm/aout.h arm/arm.h netbsd.h netbsd-aout.h arm/netbsd.h"
  22331. + tmake_file="t-netbsd arm/t-arm arm/t-netbsd"
  22332. + extra_parts=""
  22333. + use_collect2=yes
  22334. + ;;
  22335. +arm*-*-linux*) # ARM GNU/Linux with ELF
  22336. + tm_file="dbxelf.h elfos.h linux.h arm/elf.h arm/linux-gas.h arm/linux-elf.h"
  22337. + case $target in
  22338. + arm*b-*)
  22339. + tm_defines="${tm_defines} TARGET_BIG_ENDIAN_DEFAULT=1"
  22340. + ;;
  22341. + esac
  22342. + tmake_file="${tmake_file} t-linux arm/t-arm"
  22343. + case ${target} in
  22344. + arm*-*-linux-*eabi)
  22345. + tm_file="$tm_file arm/bpabi.h arm/linux-eabi.h"
  22346. + tmake_file="$tmake_file arm/t-arm-elf arm/t-bpabi arm/t-linux-eabi t-slibgcc-libgcc"
  22347. + # The BPABI long long divmod functions return a 128-bit value in
  22348. + # registers r0-r3. Correctly modeling that requires the use of
  22349. + # TImode.
  22350. + need_64bit_hwint=yes
  22351. + # The EABI requires the use of __cxa_atexit.
  22352. + default_use_cxa_atexit=yes
  22353. + ;;
  22354. + *)
  22355. + tmake_file="$tmake_file arm/t-linux"
  22356. + ;;
  22357. + esac
  22358. + tm_file="$tm_file arm/aout.h arm/arm.h"
  22359. + tmake_file="${tmake_file} arm/t-arm-softfp soft-fp/t-softfp"
  22360. + ;;
  22361. +arm*-*-uclinux*) # ARM ucLinux
  22362. + tm_file="dbxelf.h elfos.h arm/unknown-elf.h arm/elf.h arm/linux-gas.h arm/uclinux-elf.h"
  22363. + tmake_file="arm/t-arm arm/t-arm-elf"
  22364. + case ${target} in
  22365. + arm*-*-uclinux*eabi)
  22366. + tm_file="$tm_file arm/bpabi.h arm/uclinux-eabi.h"
  22367. + tmake_file="$tmake_file arm/t-bpabi"
  22368. + # The BPABI long long divmod functions return a 128-bit value in
  22369. + # registers r0-r3. Correctly modeling that requires the use of
  22370. + # TImode.
  22371. + need_64bit_hwint=yes
  22372. + # The EABI requires the use of __cxa_atexit.
  22373. + default_use_cxa_atexit=yes
  22374. + esac
  22375. + tm_file="$tm_file arm/aout.h arm/arm.h"
  22376. + tmake_file="${tmake_file} arm/t-arm-softfp soft-fp/t-softfp"
  22377. + ;;
  22378. +arm*-*-ecos-elf)
  22379. + tm_file="dbxelf.h elfos.h arm/unknown-elf.h arm/elf.h arm/aout.h arm/arm.h arm/ecos-elf.h"
  22380. + tmake_file="arm/t-arm arm/t-arm-elf"
  22381. + tmake_file="${tmake_file} arm/t-arm-softfp soft-fp/t-softfp"
  22382. + ;;
  22383. +arm*-*-eabi* | arm*-*-symbianelf* )
  22384. + # The BPABI long long divmod functions return a 128-bit value in
  22385. + # registers r0-r3. Correctly modeling that requires the use of
  22386. + # TImode.
  22387. + need_64bit_hwint=yes
  22388. + default_use_cxa_atexit=yes
  22389. + tm_file="dbxelf.h elfos.h arm/unknown-elf.h arm/elf.h arm/bpabi.h"
  22390. + tmake_file="arm/t-arm arm/t-arm-elf"
  22391. + case ${target} in
  22392. + arm*-*-eabi*)
  22393. + tm_file="$tm_file arm/eabi.h"
  22394. + tmake_file="${tmake_file} arm/t-bpabi"
  22395. + extra_options="${extra_options} arm/eabi.opt"
  22396. + ;;
  22397. + arm*-*-symbianelf*)
  22398. + tm_file="${tm_file} arm/symbian.h"
  22399. + # We do not include t-bpabi for Symbian OS because the system
  22400. + # provides its own implementation of the BPABI functions.
  22401. + tmake_file="${tmake_file} arm/t-symbian"
  22402. + ;;
  22403. + esac
  22404. + tm_file="${tm_file} arm/aout.h arm/arm.h"
  22405. + tmake_file="${tmake_file} arm/t-arm-softfp soft-fp/t-softfp"
  22406. + ;;
  22407. +arm*-*-rtems*)
  22408. + tm_file="dbxelf.h elfos.h arm/unknown-elf.h arm/elf.h arm/aout.h arm/arm.h arm/rtems-elf.h rtems.h"
  22409. + tmake_file="arm/t-arm arm/t-arm-elf t-rtems arm/t-rtems"
  22410. + tmake_file="${tmake_file} arm/t-arm-softfp soft-fp/t-softfp"
  22411. + ;;
  22412. +arm*-*-elf)
  22413. + tm_file="dbxelf.h elfos.h arm/unknown-elf.h arm/elf.h arm/aout.h arm/arm.h"
  22414. + tmake_file="arm/t-arm arm/t-arm-elf"
  22415. + tmake_file="${tmake_file} arm/t-arm-softfp soft-fp/t-softfp"
  22416. + ;;
  22417. +arm*-wince-pe*)
  22418. + tm_file="arm/semi.h arm/aout.h arm/arm.h arm/coff.h dbxcoff.h arm/pe.h arm/wince-pe.h"
  22419. + tmake_file="arm/t-arm arm/t-wince-pe"
  22420. + extra_options="${extra_options} arm/pe.opt"
  22421. + extra_objs="pe.o"
  22422. + ;;
  22423. +arm-*-pe*)
  22424. + tm_file="arm/semi.h arm/aout.h arm/arm.h arm/coff.h dbxcoff.h arm/pe.h"
  22425. + tmake_file="arm/t-arm arm/t-pe"
  22426. + extra_options="${extra_options} arm/pe.opt"
  22427. + extra_objs="pe.o"
  22428. + ;;
  22429. +avr-*-rtems*)
  22430. + tm_file="avr/avr.h dbxelf.h avr/rtems.h rtems.h"
  22431. + tmake_file="avr/t-avr t-rtems avr/t-rtems"
  22432. + ;;
  22433. +avr-*-*)
  22434. + tm_file="avr/avr.h dbxelf.h"
  22435. + ;;
  22436. +bfin*-elf*)
  22437. + tm_file="${tm_file} dbxelf.h elfos.h bfin/elf.h"
  22438. + tmake_file=bfin/t-bfin-elf
  22439. + use_collect2=no
  22440. + ;;
  22441. +bfin*-uclinux*)
  22442. + tm_file="${tm_file} dbxelf.h elfos.h bfin/elf.h linux.h bfin/uclinux.h"
  22443. + tmake_file=bfin/t-bfin-uclinux
  22444. + tm_defines="${tm_defines} UCLIBC_DEFAULT=1"
  22445. + extra_options="${extra_options} linux.opt"
  22446. + use_collect2=no
  22447. + ;;
  22448. +bfin*-linux-uclibc*)
  22449. + tm_file="${tm_file} dbxelf.h elfos.h bfin/elf.h linux.h bfin/linux.h ./linux-sysroot-suffix.h"
  22450. + tmake_file="t-slibgcc-elf-ver bfin/t-bfin-linux"
  22451. + extra_parts="crtbegin.o crtbeginS.o crtend.o crtendS.o"
  22452. + use_collect2=no
  22453. + ;;
  22454. +bfin*-rtems*)
  22455. + tm_file="${tm_file} dbxelf.h elfos.h bfin/elf.h bfin/rtems.h rtems.h"
  22456. + tmake_file="bfin/t-bfin t-rtems bfin/t-rtems"
  22457. + ;;
  22458. +bfin*-*)
  22459. + tm_file="${tm_file} dbxelf.h elfos.h bfin/elf.h"
  22460. + tmake_file=bfin/t-bfin
  22461. + use_collect2=no
  22462. + ;;
  22463. +crisv32-*-elf | crisv32-*-none)
  22464. + tm_file="dbxelf.h elfos.h ${tm_file}"
  22465. + tmake_file="cris/t-cris"
  22466. + target_cpu_default=32
  22467. + gas=yes
  22468. + extra_options="${extra_options} cris/elf.opt"
  22469. + ;;
  22470. +cris-*-elf | cris-*-none)
  22471. + tm_file="dbxelf.h elfos.h ${tm_file}"
  22472. + tmake_file="cris/t-cris cris/t-elfmulti"
  22473. + gas=yes
  22474. + extra_options="${extra_options} cris/elf.opt"
  22475. + ;;
  22476. +crisv32-*-linux* | cris-*-linux*)
  22477. + tm_file="dbxelf.h elfos.h svr4.h ${tm_file} linux.h cris/linux.h"
  22478. + # We need to avoid using t-linux, so override default tmake_file
  22479. + tmake_file="cris/t-cris t-slibgcc-elf-ver cris/t-linux"
  22480. + extra_options="${extra_options} cris/linux.opt"
  22481. + case $target in
  22482. + cris-*-*)
  22483. + target_cpu_default=10
  22484. + ;;
  22485. + crisv32-*-*)
  22486. + target_cpu_default=32
  22487. + ;;
  22488. + esac
  22489. + ;;
  22490. +crx-*-elf)
  22491. + tm_file="elfos.h ${tm_file}"
  22492. + extra_parts="crtbegin.o crtend.o"
  22493. + use_collect2=no
  22494. + ;;
  22495. +fr30-*-elf)
  22496. + tm_file="dbxelf.h elfos.h svr4.h ${tm_file}"
  22497. + tmake_file=fr30/t-fr30
  22498. + extra_parts="crti.o crtn.o crtbegin.o crtend.o"
  22499. + ;;
  22500. +frv-*-elf)
  22501. + tm_file="dbxelf.h elfos.h svr4.h ${tm_file} frv/frv-abi.h"
  22502. + tmake_file=frv/t-frv
  22503. + ;;
  22504. +frv-*-*linux*)
  22505. + tm_file="dbxelf.h elfos.h svr4.h ${tm_file} \
  22506. + linux.h frv/linux.h frv/frv-abi.h"
  22507. + tmake_file="${tmake_file} frv/t-frv frv/t-linux"
  22508. + ;;
  22509. +h8300-*-rtems*)
  22510. + tmake_file="h8300/t-h8300 h8300/t-elf t-rtems h8300/t-rtems"
  22511. + tm_file="h8300/h8300.h dbxelf.h elfos.h h8300/elf.h h8300/rtems.h rtems.h"
  22512. + ;;
  22513. +h8300-*-elf*)
  22514. + tmake_file="h8300/t-h8300 h8300/t-elf"
  22515. + tm_file="h8300/h8300.h dbxelf.h elfos.h h8300/elf.h"
  22516. + ;;
  22517. +h8300-*-*)
  22518. + tm_file="h8300/h8300.h dbxcoff.h h8300/coff.h"
  22519. + ;;
  22520. +hppa*64*-*-linux*)
  22521. + target_cpu_default="MASK_PA_11|MASK_PA_20"
  22522. + tm_file="pa/pa64-start.h ${tm_file} dbxelf.h elfos.h svr4.h linux.h \
  22523. + pa/pa-linux.h pa/pa64-regs.h pa/pa-64.h pa/pa64-linux.h"
  22524. + tmake_file="${tmake_file} pa/t-linux64"
  22525. + gas=yes gnu_ld=yes
  22526. + need_64bit_hwint=yes
  22527. + ;;
  22528. +hppa*-*-linux*)
  22529. + target_cpu_default="MASK_PA_11|MASK_NO_SPACE_REGS"
  22530. + tm_file="${tm_file} dbxelf.h elfos.h svr4.h linux.h pa/pa-linux.h \
  22531. + pa/pa32-regs.h pa/pa32-linux.h"
  22532. + tmake_file="${tmake_file} pa/t-linux"
  22533. + # Set the libgcc version number
  22534. + if test x$sjlj = x1; then
  22535. + tmake_file="$tmake_file pa/t-slibgcc-sjlj-ver"
  22536. + else
  22537. + tmake_file="$tmake_file pa/t-slibgcc-dwarf-ver"
  22538. + fi
  22539. + ;;
  22540. +# port not yet contributed.
  22541. +#hppa*-*-openbsd*)
  22542. +# target_cpu_default="MASK_PA_11"
  22543. +# ;;
  22544. +hppa[12]*-*-hpux10*)
  22545. + case ${target} in
  22546. + hppa1.1-*-* | hppa2*-*-*)
  22547. + target_cpu_default="MASK_PA_11"
  22548. + ;;
  22549. + esac
  22550. + tm_file="${tm_file} pa/pa32-regs.h dbxelf.h pa/som.h \
  22551. + pa/pa-hpux.h pa/pa-hpux10.h"
  22552. + extra_options="${extra_options} pa/pa-hpux.opt"
  22553. + case ${target} in
  22554. + *-*-hpux10.[1-9]*)
  22555. + tm_file="${tm_file} pa/pa-hpux1010.h"
  22556. + extra_options="${extra_options} pa/pa-hpux1010.opt"
  22557. + ;;
  22558. + esac
  22559. + tmake_file="pa/t-pa-hpux10 pa/t-pa-hpux pa/t-hpux-shlib"
  22560. + case ${enable_threads} in
  22561. + "")
  22562. + if test x$have_pthread_h = xyes ; then
  22563. + tmake_file="${tmake_file} pa/t-dce-thr"
  22564. + fi
  22565. + ;;
  22566. + yes | dce)
  22567. + tmake_file="${tmake_file} pa/t-dce-thr"
  22568. + ;;
  22569. + esac
  22570. + # Set the libgcc version number
  22571. + if test x$sjlj = x1; then
  22572. + tmake_file="$tmake_file pa/t-slibgcc-sjlj-ver"
  22573. + else
  22574. + tmake_file="$tmake_file pa/t-slibgcc-dwarf-ver"
  22575. + fi
  22576. + use_collect2=yes
  22577. + gas=yes
  22578. + ;;
  22579. +hppa*64*-*-hpux11*)
  22580. + target_cpu_default="MASK_PA_11|MASK_PA_20"
  22581. + if test x$gnu_ld = xyes
  22582. + then
  22583. + target_cpu_default="${target_cpu_default}|MASK_GNU_LD"
  22584. + fi
  22585. + tm_file="pa/pa64-start.h ${tm_file} dbxelf.h elfos.h \
  22586. + pa/pa64-regs.h pa/pa-hpux.h pa/pa-hpux1010.h \
  22587. + pa/pa-hpux11.h"
  22588. + case ${target} in
  22589. + *-*-hpux11.[1-9]*)
  22590. + tm_file="${tm_file} pa/pa-hpux1111.h pa/pa-64.h pa/pa64-hpux.h"
  22591. + extra_options="${extra_options} pa/pa-hpux1111.opt"
  22592. + ;;
  22593. + *)
  22594. + tm_file="${tm_file} pa/pa-64.h pa/pa64-hpux.h"
  22595. + ;;
  22596. + esac
  22597. + extra_options="${extra_options} pa/pa-hpux.opt \
  22598. + pa/pa-hpux1010.opt pa/pa64-hpux.opt"
  22599. + need_64bit_hwint=yes
  22600. + tmake_file="pa/t-pa64 pa/t-pa-hpux pa/t-hpux-shlib"
  22601. + # Set the libgcc version number
  22602. + if test x$sjlj = x1; then
  22603. + tmake_file="$tmake_file pa/t-slibgcc-sjlj-ver"
  22604. + else
  22605. + tmake_file="$tmake_file pa/t-slibgcc-dwarf-ver"
  22606. + fi
  22607. + extra_parts="crtbegin.o crtend.o crtbeginS.o crtendS.o crtbeginT.o \
  22608. + libgcc_stub.a"
  22609. + case x${enable_threads} in
  22610. + x | xyes | xposix )
  22611. + thread_file=posix
  22612. + ;;
  22613. + esac
  22614. + gas=yes
  22615. + ;;
  22616. +hppa[12]*-*-hpux11*)
  22617. + case ${target} in
  22618. + hppa1.1-*-* | hppa2*-*-*)
  22619. + target_cpu_default="MASK_PA_11"
  22620. + ;;
  22621. + esac
  22622. + tm_file="${tm_file} pa/pa32-regs.h dbxelf.h pa/som.h \
  22623. + pa/pa-hpux.h pa/pa-hpux1010.h pa/pa-hpux11.h"
  22624. + extra_options="${extra_options} pa/pa-hpux.opt pa/pa-hpux1010.opt"
  22625. + case ${target} in
  22626. + *-*-hpux11.[1-9]*)
  22627. + tm_file="${tm_file} pa/pa-hpux1111.h"
  22628. + extra_options="${extra_options} pa/pa-hpux1111.opt"
  22629. + ;;
  22630. + esac
  22631. + tmake_file="pa/t-pa-hpux11 pa/t-pa-hpux pa/t-hpux-shlib"
  22632. + # Set the libgcc version number
  22633. + if test x$sjlj = x1; then
  22634. + tmake_file="$tmake_file pa/t-slibgcc-sjlj-ver"
  22635. + else
  22636. + tmake_file="$tmake_file pa/t-slibgcc-dwarf-ver"
  22637. + fi
  22638. + extra_parts="libgcc_stub.a"
  22639. + case x${enable_threads} in
  22640. + x | xyes | xposix )
  22641. + thread_file=posix
  22642. + ;;
  22643. + esac
  22644. + use_collect2=yes
  22645. + gas=yes
  22646. + ;;
  22647. +i[34567]86-*-darwin*)
  22648. + need_64bit_hwint=yes
  22649. +
  22650. + # This is so that '.../configure && make' doesn't fail due to
  22651. + # config.guess deciding that the configuration is i386-*-darwin* and
  22652. + # then this file using that to set --with-cpu=i386 which has no -m64
  22653. + # support.
  22654. + with_cpu=${with_cpu:-generic}
  22655. + tmake_file="${tmake_file} i386/t-crtpc i386/t-crtfm"
  22656. + ;;
  22657. +x86_64-*-darwin*)
  22658. + with_cpu=${with_cpu:-generic}
  22659. + tmake_file="${tmake_file} t-darwin ${cpu_type}/t-darwin64 t-slibgcc-darwin i386/t-crtpc i386/t-crtfm"
  22660. + tm_file="${tm_file} ${cpu_type}/darwin64.h"
  22661. + ;;
  22662. +i[34567]86-*-elf*)
  22663. + tm_file="${tm_file} i386/unix.h i386/att.h dbxelf.h elfos.h i386/i386elf.h"
  22664. + tmake_file="${tmake_file} i386/t-i386elf t-svr4"
  22665. + ;;
  22666. +x86_64-*-elf*)
  22667. + tm_file="${tm_file} i386/unix.h i386/att.h dbxelf.h elfos.h i386/i386elf.h i386/x86-64.h"
  22668. + tmake_file="${tmake_file} i386/t-i386elf t-svr4"
  22669. + ;;
  22670. +i[34567]86-*-aout*)
  22671. + tm_file="${tm_file} i386/unix.h i386/bsd.h i386/gas.h i386/gstabs.h i386/i386-aout.h"
  22672. + ;;
  22673. +i[34567]86-*-freebsd*)
  22674. + tm_file="${tm_file} i386/unix.h i386/att.h dbxelf.h elfos.h ${fbsd_tm_file} i386/freebsd.h"
  22675. + ;;
  22676. +x86_64-*-freebsd*)
  22677. + tm_file="${tm_file} i386/unix.h i386/att.h dbxelf.h elfos.h ${fbsd_tm_file} i386/x86-64.h i386/freebsd.h i386/freebsd64.h"
  22678. + tmake_file="${tmake_file} i386/t-crtstuff"
  22679. + ;;
  22680. +i[34567]86-*-netbsdelf*)
  22681. + tm_file="${tm_file} i386/unix.h i386/att.h dbxelf.h elfos.h netbsd.h netbsd-elf.h i386/netbsd-elf.h"
  22682. + ;;
  22683. +i[34567]86-*-netbsd*)
  22684. + tm_file="${tm_file} i386/unix.h i386/bsd.h i386/gas.h i386/gstabs.h netbsd.h netbsd-aout.h i386/netbsd.h"
  22685. + tmake_file="${tmake_file} t-netbsd"
  22686. + extra_parts=""
  22687. + use_collect2=yes
  22688. + ;;
  22689. +x86_64-*-netbsd*)
  22690. + tm_file="${tm_file} i386/unix.h i386/att.h dbxelf.h elfos.h netbsd.h netbsd-elf.h i386/x86-64.h i386/netbsd64.h"
  22691. + tmake_file="${tmake_file} i386/t-crtstuff"
  22692. + ;;
  22693. +i[34567]86-*-openbsd2.*|i[34567]86-*openbsd3.[0123])
  22694. + tm_file="i386/i386.h i386/unix.h i386/bsd.h i386/gas.h i386/gstabs.h openbsd-oldgas.h openbsd.h i386/openbsd.h"
  22695. + # needed to unconfuse gdb
  22696. + tmake_file="${tmake_file} t-libc-ok t-openbsd i386/t-openbsd"
  22697. + # we need collect2 until our bug is fixed...
  22698. + use_collect2=yes
  22699. + ;;
  22700. +i[34567]86-*-openbsd*)
  22701. + tm_file="${tm_file} i386/unix.h i386/att.h dbxelf.h elfos.h"
  22702. + tm_file="${tm_file} openbsd.h i386/openbsdelf.h"
  22703. + gas=yes
  22704. + gnu_ld=yes
  22705. + ;;
  22706. +i[34567]86-*-coff*)
  22707. + tm_file="${tm_file} i386/unix.h i386/bsd.h i386/gas.h dbxcoff.h i386/i386-coff.h"
  22708. + ;;
  22709. +i[34567]86-*-linux* | i[34567]86-*-kfreebsd*-gnu | i[34567]86-*-knetbsd*-gnu | i[34567]86-*-gnu* | i[34567]86-*-kopensolaris*-gnu)
  22710. + # Intel 80386's running GNU/*
  22711. + # with ELF format using glibc 2
  22712. + tm_file="${tm_file} i386/unix.h i386/att.h dbxelf.h elfos.h svr4.h linux.h"
  22713. + case ${target} in
  22714. + i[34567]86-*-linux*)
  22715. + if test x$enable_targets = xall; then
  22716. + tm_file="${tm_file} i386/x86-64.h i386/linux64.h"
  22717. + tm_defines="${tm_defines} TARGET_BI_ARCH=1"
  22718. + tmake_file="${tmake_file} i386/t-linux64"
  22719. + need_64bit_hwint=yes
  22720. + case X"${with_cpu}" in
  22721. + Xgeneric|Xcore2|Xnocona|Xx86-64|Xamdfam10|Xbarcelona|Xk8|Xopteron|Xathlon64|Xathlon-fx|Xathlon64-sse3|Xk8-sse3|Xopteron-sse3)
  22722. + ;;
  22723. + X)
  22724. + if test x$with_cpu_64 = x; then
  22725. + with_cpu_64=generic
  22726. + fi
  22727. + ;;
  22728. + *)
  22729. + echo "Unsupported CPU used in --with-cpu=$with_cpu, supported values:" 1>&2
  22730. + echo "generic core2 nocona x86-64 amdfam10 barcelona k8 opteron athlon64 athlon-fx athlon64-sse3 k8-sse3 opteron-sse3" 1>&2
  22731. + exit 1
  22732. + ;;
  22733. + esac
  22734. + else
  22735. + tm_file="${tm_file} i386/linux.h"
  22736. + fi
  22737. + ;;
  22738. + i[34567]86-*-knetbsd*-gnu) tm_file="${tm_file} i386/linux.h knetbsd-gnu.h i386/knetbsd-gnu.h" ;;
  22739. + i[34567]86-*-kfreebsd*-gnu) tm_file="${tm_file} i386/linux.h kfreebsd-gnu.h i386/kfreebsd-gnu.h" ;;
  22740. + i[34567]86-*-kopensolaris*-gnu) tm_file="${tm_file} i386/linux.h kopensolaris-gnu.h i386/kopensolaris-gnu.h" ;;
  22741. + i[34567]86-*-gnu*) tm_file="$tm_file i386/linux.h gnu.h i386/gnu.h";;
  22742. + esac
  22743. + tmake_file="${tmake_file} i386/t-crtstuff i386/t-crtpc i386/t-crtfm t-dfprules"
  22744. + ;;
  22745. +x86_64-*-linux* | x86_64-*-kfreebsd*-gnu | x86_64-*-knetbsd*-gnu)
  22746. + tm_file="${tm_file} i386/unix.h i386/att.h dbxelf.h elfos.h svr4.h linux.h \
  22747. + i386/x86-64.h i386/linux64.h"
  22748. + case ${target} in
  22749. + x86_64-*-kfreebsd*-gnu) tm_file="${tm_file} kfreebsd-gnu.h" ;;
  22750. + x86_64-*-knetbsd*-gnu) tm_file="${tm_file} knetbsd-gnu.h" ;;
  22751. + esac
  22752. + tmake_file="${tmake_file} i386/t-linux64 i386/t-crtstuff i386/t-crtpc i386/t-crtfm t-dfprules"
  22753. + ;;
  22754. +i[34567]86-pc-msdosdjgpp*)
  22755. + xm_file=i386/xm-djgpp.h
  22756. + tm_file="dbxcoff.h ${tm_file} i386/unix.h i386/bsd.h i386/gas.h i386/djgpp.h"
  22757. + tmake_file="${tmake_file} i386/t-djgpp"
  22758. + extra_options="${extra_options} i386/djgpp.opt"
  22759. + gnu_ld=yes
  22760. + gas=yes
  22761. + ;;
  22762. +i[34567]86-*-lynxos*)
  22763. + xm_defines=POSIX
  22764. + tm_file="${tm_file} i386/unix.h i386/att.h dbxelf.h elfos.h i386/lynx.h lynx.h"
  22765. + tmake_file="${tmake_file} i386/t-crtstuff t-lynx"
  22766. + extra_parts="crtbegin.o crtbeginS.o crtend.o crtendS.o"
  22767. + extra_options="${extra_options} lynx.opt"
  22768. + thread_file=lynx
  22769. + gnu_ld=yes
  22770. + gas=yes
  22771. + ;;
  22772. +i[3456x]86-*-netware*)
  22773. + tm_file="${tm_file} i386/unix.h i386/att.h dbxelf.h elfos.h svr4.h tm-dwarf2.h i386/netware.h"
  22774. + tmake_file="${tmake_file} i386/t-netware"
  22775. + extra_objs=netware.o
  22776. + case /${with_ld} in
  22777. + */nwld)
  22778. + extra_objs="$extra_objs nwld.o"
  22779. + tm_file="${tm_file} i386/nwld.h"
  22780. + tmake_file="${tmake_file} i386/t-nwld"
  22781. + extra_parts="crt0.o libgcc.def libc.def libcpre.def posixpre.def"
  22782. + ;;
  22783. + esac
  22784. + case x${enable_threads} in
  22785. + x | xyes | xposix) thread_file='posix';;
  22786. + xnks) thread_file='nks';;
  22787. + xno) ;;
  22788. + *) echo 'Unknown thread configuration for NetWare' >&2; exit 1;;
  22789. + esac
  22790. + ;;
  22791. +i[34567]86-*-nto-qnx*)
  22792. + tm_file="${tm_file} i386/att.h dbxelf.h tm-dwarf2.h elfos.h svr4.h i386/unix.h i386/nto.h"
  22793. + tmake_file="${tmake_file} i386/t-nto"
  22794. + gnu_ld=yes
  22795. + gas=yes
  22796. + ;;
  22797. +i[34567]86-*-rtems*)
  22798. + tm_file="${tm_file} i386/unix.h i386/att.h dbxelf.h elfos.h i386/i386elf.h i386/rtemself.h rtems.h"
  22799. + extra_parts="crtbegin.o crtend.o crti.o crtn.o"
  22800. + tmake_file="${tmake_file} i386/t-rtems-i386 i386/t-crtstuff t-rtems"
  22801. + ;;
  22802. +i[34567]86-*-solaris2*)
  22803. + tm_file="${tm_file} i386/unix.h i386/att.h dbxelf.h elfos.h svr4.h i386/sysv4.h sol2.h"
  22804. + case ${target} in
  22805. + *-*-solaris2.1[0-9]*)
  22806. + tm_file="${tm_file} sol2-10.h"
  22807. + ;;
  22808. + esac
  22809. + tm_file="${tm_file} i386/sol2.h"
  22810. + if test x$gnu_ld = xyes; then
  22811. + tm_file="${tm_file} sol2-gld.h"
  22812. + fi
  22813. + if test x$gas = xyes; then
  22814. + tm_file="${tm_file} i386/sol2-gas.h"
  22815. + fi
  22816. + tmake_file="${tmake_file} t-sol2 t-svr4"
  22817. + c_target_objs="${c_target_objs} sol2-c.o"
  22818. + cxx_target_objs="${cxx_target_objs} sol2-c.o"
  22819. + extra_objs="sol2.o"
  22820. + tm_p_file="${tm_p_file} sol2-protos.h"
  22821. + if test x$gnu_ld = xyes; then
  22822. + tmake_file="$tmake_file t-slibgcc-elf-ver"
  22823. + tm_defines="${tm_defines} TARGET_GNU_LD=1"
  22824. + else
  22825. + tmake_file="$tmake_file t-slibgcc-sld"
  22826. + fi
  22827. + if test x$gas = xyes; then
  22828. + tm_file="usegas.h ${tm_file}"
  22829. + fi
  22830. + tm_file="$tm_file tm-dwarf2.h"
  22831. + case ${target} in
  22832. + *-*-solaris2.1[0-9]*)
  22833. + tm_file="${tm_file} i386/x86-64.h i386/sol2-10.h"
  22834. + tm_defines="${tm_defines} TARGET_BI_ARCH=1"
  22835. + tmake_file="$tmake_file i386/t-sol2-10"
  22836. + # i386/t-crtstuff only affects libgcc. Its inclusion
  22837. + # depends on a runtime test and is thus performed in
  22838. + # libgcc/configure.ac instead.
  22839. + need_64bit_hwint=yes
  22840. + case X"${with_cpu}" in
  22841. + Xgeneric|Xcore2|Xnocona|Xx86-64|Xamdfam10|Xbarcelona|Xk8|Xopteron|Xathlon64|Xathlon-fx|Xathlon64-sse3|Xk8-sse3|Xopteron-sse3)
  22842. + ;;
  22843. + X)
  22844. + if test x$with_cpu_64 = x; then
  22845. + with_cpu_64=generic
  22846. + fi
  22847. + ;;
  22848. + *)
  22849. + echo "Unsupported CPU used in --with-cpu=$with_cpu, supported values:" 1>&2
  22850. + echo "generic core2 nocona x86-64 amdfam10 barcelona k8 opteron athlon64 athlon-fx athlon64-sse3 k8-sse3 opteron-sse3" 1>&2
  22851. + exit 1
  22852. + ;;
  22853. + esac
  22854. + ;;
  22855. + esac
  22856. + case ${enable_threads}:${have_pthread_h}:${have_thread_h} in
  22857. + "":yes:* | yes:yes:* )
  22858. + thread_file=posix
  22859. + ;;
  22860. + "":*:yes | yes:*:yes )
  22861. + thread_file=solaris
  22862. + ;;
  22863. + esac
  22864. + ;;
  22865. +i[4567]86-wrs-vxworks|i[4567]86-wrs-vxworksae)
  22866. + tm_file="${tm_file} i386/unix.h i386/att.h elfos.h svr4.h vx-common.h"
  22867. + case ${target} in
  22868. + *-vxworksae*)
  22869. + tm_file="${tm_file} vxworksae.h i386/vx-common.h i386/vxworksae.h"
  22870. + tmake_file="${tmake_file} i386/t-vxworks i386/t-vxworksae"
  22871. + ;;
  22872. + *)
  22873. + tm_file="${tm_file} vxworks.h i386/vx-common.h i386/vxworks.h"
  22874. + tmake_file="${tmake_file} i386/t-vxworks"
  22875. + ;;
  22876. + esac
  22877. + ;;
  22878. +i[34567]86-*-pe | i[34567]86-*-cygwin*)
  22879. + tm_file="${tm_file} i386/unix.h i386/bsd.h i386/gas.h dbxcoff.h i386/cygming.h i386/cygwin.h"
  22880. + xm_file=i386/xm-cygwin.h
  22881. + # This has to match the logic for DWARF2_UNWIND_INFO in gcc/config/i386/cygming.h
  22882. + if test x$sjlj = x0; then
  22883. + tmake_eh_file="i386/t-dw2-eh"
  22884. + else
  22885. + tmake_eh_file="i386/t-sjlj-eh"
  22886. + fi
  22887. + tmake_file="${tmake_file} ${tmake_eh_file} i386/t-cygming i386/t-cygwin"
  22888. + target_gtfiles="\$(srcdir)/config/i386/winnt.c"
  22889. + extra_options="${extra_options} i386/cygming.opt"
  22890. + extra_objs="winnt.o winnt-stubs.o"
  22891. + c_target_objs="${c_target_objs} cygwin2.o msformat-c.o"
  22892. + cxx_target_objs="${cxx_target_objs} cygwin2.o winnt-cxx.o msformat-c.o"
  22893. + extra_gcc_objs=cygwin1.o
  22894. + if test x$enable_threads = xyes; then
  22895. + thread_file='posix'
  22896. + fi
  22897. + ;;
  22898. +i[34567]86-*-mingw* | x86_64-*-mingw*)
  22899. + tm_file="${tm_file} i386/unix.h i386/bsd.h i386/gas.h dbxcoff.h i386/cygming.h i386/mingw32.h"
  22900. + xm_file=i386/xm-mingw32.h
  22901. + case ${target} in
  22902. + x86_64-*-*)
  22903. + need_64bit_hwint=yes
  22904. + ;;
  22905. + *)
  22906. + ;;
  22907. + esac
  22908. + # This has to match the logic for DWARF2_UNWIND_INFO in gcc/config/i386/cygming.h
  22909. + if test x$sjlj = x0; then
  22910. + tmake_eh_file="i386/t-dw2-eh"
  22911. + else
  22912. + tmake_eh_file="i386/t-sjlj-eh"
  22913. + fi
  22914. + tmake_file="${tmake_file} ${tmake_eh_file} i386/t-cygming i386/t-mingw32"
  22915. + target_gtfiles="\$(srcdir)/config/i386/winnt.c"
  22916. + extra_options="${extra_options} i386/cygming.opt i386/mingw.opt"
  22917. + extra_objs="winnt.o winnt-stubs.o"
  22918. + c_target_objs="${c_target_objs} msformat-c.o"
  22919. + cxx_target_objs="${cxx_target_objs} winnt-cxx.o msformat-c.o"
  22920. + default_use_cxa_atexit=yes
  22921. + case ${enable_threads} in
  22922. + "" | yes | win32)
  22923. + thread_file='win32'
  22924. + tmake_file="${tmake_file} i386/t-gthr-win32"
  22925. + ;;
  22926. + esac
  22927. + case ${target} in
  22928. + x86_64-*-mingw*)
  22929. + tmake_file="${tmake_file} i386/t-crtfm"
  22930. + ;;
  22931. + *)
  22932. + ;;
  22933. + esac
  22934. + case ${target} in
  22935. + *mingw32crt*)
  22936. + tm_file="${tm_file} i386/crtdll.h"
  22937. + ;;
  22938. + *mingw32msv* | *mingw*)
  22939. + ;;
  22940. + esac
  22941. + ;;
  22942. +i[34567]86-*-interix3*)
  22943. + tm_file="${tm_file} i386/unix.h i386/bsd.h i386/gas.h i386/i386-interix.h i386/i386-interix3.h interix.h interix3.h"
  22944. + tmake_file="${tmake_file} i386/t-interix"
  22945. + extra_objs=winnt.o
  22946. + target_gtfiles="\$(srcdir)/config/i386/winnt.c"
  22947. + if test x$enable_threads = xyes ; then
  22948. + thread_file='posix'
  22949. + fi
  22950. + if test x$stabs = xyes ; then
  22951. + tm_file="${tm_file} dbxcoff.h"
  22952. + fi
  22953. + ;;
  22954. +ia64*-*-elf*)
  22955. + tm_file="${tm_file} dbxelf.h elfos.h ia64/sysv4.h ia64/elf.h"
  22956. + tmake_file="ia64/t-ia64"
  22957. + target_cpu_default="0"
  22958. + if test x$gas = xyes
  22959. + then
  22960. + target_cpu_default="${target_cpu_default}|MASK_GNU_AS"
  22961. + fi
  22962. + if test x$gnu_ld = xyes
  22963. + then
  22964. + target_cpu_default="${target_cpu_default}|MASK_GNU_LD"
  22965. + fi
  22966. + extra_parts="crtbegin.o crtend.o crtbeginS.o crtendS.o crtfastmath.o"
  22967. + ;;
  22968. +ia64*-*-freebsd*)
  22969. + tm_file="${tm_file} dbxelf.h elfos.h ${fbsd_tm_file} ia64/sysv4.h ia64/freebsd.h"
  22970. + target_cpu_default="MASK_GNU_AS|MASK_GNU_LD"
  22971. + tmake_file="${tmake_file} ia64/t-ia64"
  22972. + extra_parts="crtbegin.o crtend.o crtbeginS.o crtendS.o crtfastmath.o"
  22973. + ;;
  22974. +ia64*-*-linux*)
  22975. + tm_file="${tm_file} dbxelf.h elfos.h svr4.h linux.h ia64/sysv4.h ia64/linux.h"
  22976. + tmake_file="${tmake_file} ia64/t-ia64 t-libunwind ia64/t-glibc"
  22977. + if test x$with_system_libunwind != xyes ; then
  22978. + tmake_file="${tmake_file} t-libunwind-elf ia64/t-glibc-libunwind"
  22979. + fi
  22980. + target_cpu_default="MASK_GNU_AS|MASK_GNU_LD"
  22981. + extra_parts="crtbegin.o crtend.o crtbeginS.o crtendS.o crtfastmath.o"
  22982. + ;;
  22983. +ia64*-*-hpux*)
  22984. + tm_file="${tm_file} dbxelf.h elfos.h svr4.h ia64/sysv4.h ia64/hpux.h"
  22985. + tmake_file="ia64/t-ia64 ia64/t-hpux"
  22986. + target_cpu_default="MASK_GNU_AS"
  22987. + case x$enable_threads in
  22988. + x | xyes | xposix )
  22989. + thread_file=posix
  22990. + ;;
  22991. + esac
  22992. + use_collect2=no
  22993. + c_target_objs="ia64-c.o"
  22994. + cxx_target_objs="ia64-c.o"
  22995. + extra_options="${extra_options} ia64/ilp32.opt"
  22996. + ;;
  22997. +iq2000*-*-elf*)
  22998. + tm_file="svr4.h elfos.h iq2000/iq2000.h"
  22999. + tmake_file=iq2000/t-iq2000
  23000. + out_file=iq2000/iq2000.c
  23001. + md_file=iq2000/iq2000.md
  23002. + ;;
  23003. +m32r-*-elf*)
  23004. + tm_file="dbxelf.h elfos.h svr4.h ${tm_file}"
  23005. + extra_parts="crtinit.o crtfini.o"
  23006. + ;;
  23007. +m32rle-*-elf*)
  23008. + tm_file="dbxelf.h elfos.h svr4.h m32r/little.h ${tm_file}"
  23009. + extra_parts="crtinit.o crtfini.o m32rx/crtinit.o m32rx/crtfini.o"
  23010. + ;;
  23011. +m32r-*-rtems*)
  23012. + tm_file="dbxelf.h elfos.h svr4.h ${tm_file} m32r/rtems.h rtems.h"
  23013. + tmake_file="m32r/t-m32r t-rtems"
  23014. + extra_parts="crtinit.o crtfini.o"
  23015. + ;;
  23016. +m32r-*-linux*)
  23017. + tm_file="dbxelf.h elfos.h svr4.h linux.h ${tm_file} m32r/linux.h"
  23018. + # We override the tmake_file for linux -- why?
  23019. + tmake_file="t-slibgcc-elf-ver m32r/t-linux"
  23020. + gnu_ld=yes
  23021. + if test x$enable_threads = xyes; then
  23022. + thread_file='posix'
  23023. + fi
  23024. + ;;
  23025. +m32rle-*-linux*)
  23026. + tm_file="dbxelf.h elfos.h svr4.h linux.h m32r/little.h ${tm_file} m32r/linux.h"
  23027. + # We override the tmake_file for linux -- why?
  23028. + tmake_file="t-slibgcc-elf-ver m32r/t-linux"
  23029. + gnu_ld=yes
  23030. + if test x$enable_threads = xyes; then
  23031. + thread_file='posix'
  23032. + fi
  23033. + ;;
  23034. +# m68hc11 and m68hc12 share the same machine description.
  23035. +m68hc11-*-*|m6811-*-*)
  23036. + tm_file="dbxelf.h elfos.h usegas.h m68hc11/m68hc11.h"
  23037. + tm_p_file="m68hc11/m68hc11-protos.h"
  23038. + md_file="m68hc11/m68hc11.md"
  23039. + out_file="m68hc11/m68hc11.c"
  23040. + tmake_file="m68hc11/t-m68hc11"
  23041. + ;;
  23042. +m68hc12-*-*|m6812-*-*)
  23043. + tm_file="m68hc11/m68hc12.h dbxelf.h elfos.h usegas.h m68hc11/m68hc11.h"
  23044. + tm_p_file="m68hc11/m68hc11-protos.h"
  23045. + md_file="m68hc11/m68hc11.md"
  23046. + out_file="m68hc11/m68hc11.c"
  23047. + tmake_file="m68hc11/t-m68hc11"
  23048. + extra_options="${extra_options} m68hc11/m68hc11.opt"
  23049. + ;;
  23050. +m68k-*-aout*)
  23051. + default_m68k_cpu=68020
  23052. + default_cf_cpu=5206
  23053. + tmake_file="m68k/t-floatlib m68k/t-m68kbare m68k/t-mlibs"
  23054. + tm_file="${tm_file} m68k/m68k-none.h m68k/m68kemb.h m68k/m68k-aout.h libgloss.h"
  23055. + ;;
  23056. +m68k-*-coff*)
  23057. + default_m68k_cpu=68020
  23058. + default_cf_cpu=5206
  23059. + tmake_file="m68k/t-floatlib m68k/t-m68kbare m68k/t-mlibs"
  23060. + tm_defines="${tm_defines} MOTOROLA=1"
  23061. + tm_file="${tm_file} m68k/m68k-none.h m68k/m68kemb.h dbxcoff.h m68k/coff.h dbx.h"
  23062. + ;;
  23063. +m68k-*-elf* | fido-*-elf*)
  23064. + case ${target} in
  23065. + fido-*-elf*)
  23066. + # Check that $with_cpu makes sense.
  23067. + case $with_cpu in
  23068. + "" | "fidoa")
  23069. + ;;
  23070. + *)
  23071. + echo "Cannot accept --with-cpu=$with_cpu"
  23072. + exit 1
  23073. + ;;
  23074. + esac
  23075. + with_cpu=fidoa
  23076. + ;;
  23077. + *)
  23078. + default_m68k_cpu=68020
  23079. + default_cf_cpu=5206
  23080. + ;;
  23081. + esac
  23082. + tm_file="${tm_file} m68k/m68k-none.h m68k/m68kelf.h dbxelf.h elfos.h m68k/m68kemb.h m68k/m68020-elf.h"
  23083. + tm_defines="${tm_defines} MOTOROLA=1"
  23084. + tmake_file="m68k/t-floatlib m68k/t-m68kbare m68k/t-m68kelf"
  23085. + # Add multilibs for targets other than fido.
  23086. + case ${target} in
  23087. + fido-*-elf*)
  23088. + ;;
  23089. + *)
  23090. + tmake_file="$tmake_file m68k/t-mlibs"
  23091. + ;;
  23092. + esac
  23093. + extra_parts="crtbegin.o crtend.o"
  23094. + ;;
  23095. +m68k*-*-netbsdelf*)
  23096. + default_m68k_cpu=68020
  23097. + default_cf_cpu=5475
  23098. + tm_file="${tm_file} dbxelf.h elfos.h netbsd.h netbsd-elf.h m68k/netbsd-elf.h"
  23099. + tm_defines="${tm_defines} MOTOROLA=1"
  23100. + ;;
  23101. +m68k*-*-openbsd*)
  23102. + default_m68k_cpu=68020
  23103. + default_cf_cpu=5475
  23104. + # needed to unconfuse gdb
  23105. + tm_defines="${tm_defines} OBSD_OLD_GAS"
  23106. + tm_file="${tm_file} openbsd.h m68k/openbsd.h"
  23107. + tmake_file="t-libc-ok t-openbsd m68k/t-openbsd"
  23108. + # we need collect2 until our bug is fixed...
  23109. + use_collect2=yes
  23110. + ;;
  23111. +m68k-*-uclinuxoldabi*) # Motorola m68k/ColdFire running uClinux
  23112. + # with uClibc, using the original
  23113. + # m68k-elf-based ABI
  23114. + default_m68k_cpu=68020
  23115. + default_cf_cpu=5206
  23116. + tm_file="${tm_file} m68k/m68k-none.h m68k/m68kelf.h dbxelf.h elfos.h m68k/uclinux-oldabi.h"
  23117. + tm_defines="${tm_defines} MOTOROLA=1"
  23118. + tmake_file="m68k/t-floatlib m68k/t-uclinux"
  23119. + ;;
  23120. +m68k-*-uclinux*) # Motorola m68k/ColdFire running uClinux
  23121. + # with uClibc, using the new GNU/Linux-style
  23122. + # ABI.
  23123. + default_m68k_cpu=68020
  23124. + default_cf_cpu=5206
  23125. + tm_file="${tm_file} dbxelf.h elfos.h svr4.h linux.h flat.h m68k/linux.h m68k/uclinux.h ./sysroot-suffix.h"
  23126. + tm_defines="${tm_defines} MOTOROLA=1 UCLIBC_DEFAULT=1"
  23127. + extra_options="${extra_options} linux.opt"
  23128. + tmake_file="m68k/t-floatlib m68k/t-uclinux m68k/t-mlibs"
  23129. + ;;
  23130. +m68k-*-linux*) # Motorola m68k's running GNU/Linux
  23131. + # with ELF format using glibc 2
  23132. + # aka the GNU/Linux C library 6.
  23133. + default_m68k_cpu=68020
  23134. + default_cf_cpu=5475
  23135. + with_arch=${with_arch:-m68k}
  23136. + tm_file="${tm_file} dbxelf.h elfos.h svr4.h linux.h m68k/linux.h ./sysroot-suffix.h"
  23137. + extra_options="${extra_options} m68k/ieee.opt"
  23138. + tm_defines="${tm_defines} MOTOROLA=1"
  23139. + tmake_file="${tmake_file} m68k/t-floatlib m68k/t-linux m68k/t-mlibs"
  23140. + # if not configured with --enable-sjlj-exceptions, bump the
  23141. + # libgcc version number
  23142. + if test x$sjlj != x1; then
  23143. + tmake_file="$tmake_file m68k/t-slibgcc-elf-ver"
  23144. + fi
  23145. + ;;
  23146. +m68k-*-rtems*)
  23147. + default_m68k_cpu=68020
  23148. + default_cf_cpu=5206
  23149. + tmake_file="m68k/t-floatlib m68k/t-m68kbare m68k/t-crtstuff t-rtems m68k/t-rtems m68k/t-mlibs"
  23150. + tm_file="${tm_file} m68k/m68k-none.h m68k/m68kelf.h dbxelf.h elfos.h m68k/m68kemb.h m68k/m68020-elf.h m68k/rtemself.h rtems.h"
  23151. + tm_defines="${tm_defines} MOTOROLA=1"
  23152. + extra_parts="crtbegin.o crtend.o"
  23153. + ;;
  23154. +mcore-*-elf)
  23155. + tm_file="dbxelf.h elfos.h svr4.h ${tm_file} mcore/mcore-elf.h"
  23156. + tmake_file=mcore/t-mcore
  23157. + inhibit_libc=true
  23158. + ;;
  23159. +mcore-*-pe*)
  23160. + tm_file="svr3.h dbxcoff.h ${tm_file} mcore/mcore-pe.h"
  23161. + tmake_file=mcore/t-mcore-pe
  23162. + inhibit_libc=true
  23163. + ;;
  23164. +mips-sgi-irix[56]*)
  23165. + tm_file="elfos.h ${tm_file} mips/iris.h"
  23166. + tmake_file="mips/t-iris mips/t-slibgcc-irix"
  23167. + target_cpu_default="MASK_ABICALLS"
  23168. + case ${target} in
  23169. + *-*-irix5*)
  23170. + tm_file="${tm_file} mips/iris5.h"
  23171. + ;;
  23172. +
  23173. + *-*-irix6*)
  23174. + tm_file="${tm_file} mips/iris6.h"
  23175. + tmake_file="${tmake_file} mips/t-iris6"
  23176. + tm_defines="${tm_defines} MIPS_ISA_DEFAULT=3 MIPS_ABI_DEFAULT=ABI_N32"
  23177. + ;;
  23178. + esac
  23179. + if test "x$stabs" = xyes
  23180. + then
  23181. + tm_file="${tm_file} dbx.h mips/dbxmdebug.h"
  23182. + fi
  23183. + if test "x$gnu_ld" = xyes
  23184. + then
  23185. + tm_defines="${tm_defines} IRIX_USING_GNU_LD"
  23186. + fi
  23187. + case ${enable_threads}:${have_pthread_h} in
  23188. + "":yes | yes:yes ) thread_file=posix ;;
  23189. + esac
  23190. + ;;
  23191. +mips*-*-netbsd*) # NetBSD/mips, either endian.
  23192. + target_cpu_default="MASK_ABICALLS"
  23193. + tm_file="elfos.h ${tm_file} mips/elf.h netbsd.h netbsd-elf.h mips/netbsd.h"
  23194. + ;;
  23195. +mips64*-*-linux* | mipsisa64*-*-linux*)
  23196. + tm_file="dbxelf.h elfos.h svr4.h linux.h ${tm_file} mips/linux.h mips/linux64.h"
  23197. + tmake_file="${tmake_file} mips/t-linux64 mips/t-libgcc-mips16"
  23198. + tm_defines="${tm_defines} MIPS_ABI_DEFAULT=ABI_N32"
  23199. + case ${target} in
  23200. + mips64el-st-linux-gnu)
  23201. + tm_file="${tm_file} mips/st.h"
  23202. + tmake_file="${tmake_file} mips/t-st"
  23203. + ;;
  23204. + mips64octeon*-*-linux*)
  23205. + tm_defines="${tm_defines} MIPS_CPU_STRING_DEFAULT=\\\"octeon\\\""
  23206. + target_cpu_default=MASK_SOFT_FLOAT_ABI
  23207. + ;;
  23208. + mipsisa64r2*-*-linux*)
  23209. + tm_defines="${tm_defines} MIPS_ISA_DEFAULT=65"
  23210. + ;;
  23211. + esac
  23212. + gnu_ld=yes
  23213. + gas=yes
  23214. + test x$with_llsc != x || with_llsc=yes
  23215. + ;;
  23216. +mips*-*-linux*) # Linux MIPS, either endian.
  23217. + tm_file="dbxelf.h elfos.h svr4.h linux.h ${tm_file} mips/linux.h"
  23218. + tmake_file="${tmake_file} mips/t-libgcc-mips16"
  23219. + case ${target} in
  23220. + mipsisa32r2*)
  23221. + tm_defines="${tm_defines} MIPS_ISA_DEFAULT=33"
  23222. + ;;
  23223. + mipsisa32*)
  23224. + tm_defines="${tm_defines} MIPS_ISA_DEFAULT=32"
  23225. + esac
  23226. + test x$with_llsc != x || with_llsc=yes
  23227. + ;;
  23228. +mips*-*-openbsd*)
  23229. + tm_defines="${tm_defines} OBSD_HAS_DECLARE_FUNCTION_NAME OBSD_HAS_DECLARE_OBJECT OBSD_HAS_CORRECT_SPECS"
  23230. + target_cpu_default="MASK_ABICALLS"
  23231. + tm_file="mips/mips.h openbsd.h mips/openbsd.h mips/sdb.h"
  23232. + case ${target} in
  23233. + mips*el-*-openbsd*)
  23234. + tm_defines="${tm_defines} TARGET_ENDIAN_DEFAULT=0";;
  23235. + *) tm_defines="${tm_defines} TARGET_ENDIAN_DEFAULT=MASK_BIG_ENDIAN";;
  23236. + esac
  23237. + ;;
  23238. +mips*-sde-elf*)
  23239. + tm_file="elfos.h ${tm_file} mips/elf.h mips/sde.h"
  23240. + tmake_file="mips/t-sde mips/t-libgcc-mips16"
  23241. + case "${with_newlib}" in
  23242. + yes)
  23243. + # newlib / libgloss.
  23244. + ;;
  23245. + *)
  23246. + # MIPS toolkit libraries.
  23247. + tm_file="$tm_file mips/sdemtk.h"
  23248. + tmake_file="$tmake_file mips/t-sdemtk"
  23249. + extra_options="$extra_options mips/sdemtk.opt"
  23250. + case ${enable_threads} in
  23251. + "" | yes | mipssde)
  23252. + thread_file='mipssde'
  23253. + ;;
  23254. + esac
  23255. + ;;
  23256. + esac
  23257. + case ${target} in
  23258. + mipsisa32r2*)
  23259. + tm_defines="MIPS_ISA_DEFAULT=33 MIPS_ABI_DEFAULT=ABI_32"
  23260. + ;;
  23261. + mipsisa32*)
  23262. + tm_defines="MIPS_ISA_DEFAULT=32 MIPS_ABI_DEFAULT=ABI_32"
  23263. + ;;
  23264. + mipsisa64r2*)
  23265. + tm_defines="MIPS_ISA_DEFAULT=65 MIPS_ABI_DEFAULT=ABI_N32"
  23266. + ;;
  23267. + mipsisa64*)
  23268. + tm_defines="MIPS_ISA_DEFAULT=64 MIPS_ABI_DEFAULT=ABI_N32"
  23269. + ;;
  23270. + esac
  23271. + ;;
  23272. +mipsisa32-*-elf* | mipsisa32el-*-elf* | \
  23273. +mipsisa32r2-*-elf* | mipsisa32r2el-*-elf* | \
  23274. +mipsisa64-*-elf* | mipsisa64el-*-elf* | \
  23275. +mipsisa64r2-*-elf* | mipsisa64r2el-*-elf*)
  23276. + tm_file="elfos.h ${tm_file} mips/elf.h"
  23277. + tmake_file="mips/t-isa3264 mips/t-libgcc-mips16"
  23278. + case ${target} in
  23279. + mipsisa32r2*)
  23280. + tm_defines="${tm_defines} MIPS_ISA_DEFAULT=33"
  23281. + ;;
  23282. + mipsisa32*)
  23283. + tm_defines="${tm_defines} MIPS_ISA_DEFAULT=32"
  23284. + ;;
  23285. + mipsisa64r2*)
  23286. + tm_defines="${tm_defines} MIPS_ISA_DEFAULT=65"
  23287. + ;;
  23288. + mipsisa64*)
  23289. + tm_defines="${tm_defines} MIPS_ISA_DEFAULT=64"
  23290. + ;;
  23291. + esac
  23292. + case ${target} in
  23293. + mipsisa32*-*-elfoabi*)
  23294. + tm_defines="${tm_defines} MIPS_ABI_DEFAULT=ABI_32"
  23295. + tm_file="${tm_file} mips/elfoabi.h"
  23296. + ;;
  23297. + mipsisa64*-*-elfoabi*)
  23298. + tm_defines="${tm_defines} MIPS_ABI_DEFAULT=ABI_O64"
  23299. + tm_file="${tm_file} mips/elfoabi.h"
  23300. + ;;
  23301. + *-*-elf*)
  23302. + tm_defines="${tm_defines} MIPS_ABI_DEFAULT=ABI_EABI"
  23303. + ;;
  23304. + esac
  23305. + ;;
  23306. +mipsisa64sr71k-*-elf*)
  23307. + tm_file="elfos.h ${tm_file} mips/elf.h"
  23308. + tmake_file=mips/t-sr71k
  23309. + target_cpu_default="MASK_64BIT|MASK_FLOAT64"
  23310. + tm_defines="${tm_defines} MIPS_ISA_DEFAULT=64 MIPS_CPU_STRING_DEFAULT=\\\"sr71000\\\" MIPS_ABI_DEFAULT=ABI_EABI"
  23311. + ;;
  23312. +mipsisa64sb1-*-elf* | mipsisa64sb1el-*-elf*)
  23313. + tm_file="elfos.h ${tm_file} mips/elf.h"
  23314. + tmake_file="mips/t-elf mips/t-libgcc-mips16 mips/t-sb1"
  23315. + target_cpu_default="MASK_64BIT|MASK_FLOAT64"
  23316. + tm_defines="${tm_defines} MIPS_ISA_DEFAULT=64 MIPS_CPU_STRING_DEFAULT=\\\"sb1\\\" MIPS_ABI_DEFAULT=ABI_O64"
  23317. + ;;
  23318. +mips-*-elf* | mipsel-*-elf*)
  23319. + tm_file="elfos.h ${tm_file} mips/elf.h"
  23320. + tmake_file="mips/t-elf mips/t-libgcc-mips16"
  23321. + ;;
  23322. +mips64-*-elf* | mips64el-*-elf*)
  23323. + tm_file="elfos.h ${tm_file} mips/elf.h"
  23324. + tmake_file="mips/t-elf mips/t-libgcc-mips16"
  23325. + target_cpu_default="MASK_64BIT|MASK_FLOAT64"
  23326. + tm_defines="${tm_defines} MIPS_ISA_DEFAULT=3 MIPS_ABI_DEFAULT=ABI_O64"
  23327. + ;;
  23328. +mips64vr-*-elf* | mips64vrel-*-elf*)
  23329. + tm_file="elfos.h ${tm_file} mips/vr.h mips/elf.h"
  23330. + tmake_file=mips/t-vr
  23331. + ;;
  23332. +mips64orion-*-elf* | mips64orionel-*-elf*)
  23333. + tm_file="elfos.h ${tm_file} mips/elforion.h mips/elf.h"
  23334. + tmake_file="mips/t-elf mips/t-libgcc-mips16"
  23335. + target_cpu_default="MASK_64BIT|MASK_FLOAT64"
  23336. + tm_defines="${tm_defines} MIPS_ISA_DEFAULT=3 MIPS_ABI_DEFAULT=ABI_O64"
  23337. + ;;
  23338. +mips*-*-rtems*)
  23339. + tm_file="elfos.h ${tm_file} mips/elf.h mips/rtems.h rtems.h"
  23340. + tmake_file="mips/t-elf mips/t-libgcc-mips16 t-rtems mips/t-rtems"
  23341. + ;;
  23342. +mips-wrs-vxworks)
  23343. + tm_file="elfos.h ${tm_file} svr4.h mips/elf.h vx-common.h vxworks.h mips/vxworks.h"
  23344. + tmake_file="${tmake_file} mips/t-vxworks"
  23345. + ;;
  23346. +mipstx39-*-elf* | mipstx39el-*-elf*)
  23347. + tm_file="elfos.h ${tm_file} mips/r3900.h mips/elf.h"
  23348. + tmake_file="mips/t-r3900 mips/t-libgcc-mips16"
  23349. + ;;
  23350. +mmix-knuth-mmixware)
  23351. + need_64bit_hwint=yes
  23352. + ;;
  23353. +mn10300-*-*)
  23354. + tm_file="dbxelf.h elfos.h svr4.h ${tm_file}"
  23355. + if test x$stabs = xyes
  23356. + then
  23357. + tm_file="${tm_file} dbx.h"
  23358. + fi
  23359. + use_collect2=no
  23360. + ;;
  23361. +pdp11-*-bsd)
  23362. + tm_file="${tm_file} pdp11/2bsd.h"
  23363. + use_fixproto=yes
  23364. + ;;
  23365. +pdp11-*-*)
  23366. + ;;
  23367. +picochip-*)
  23368. + # Nothing special
  23369. + ;;
  23370. +# port not yet contributed
  23371. +#powerpc-*-openbsd*)
  23372. +# tmake_file="${tmake_file} rs6000/t-fprules rs6000/t-fprules-fpbit "
  23373. +# extra_headers=
  23374. +# ;;
  23375. +powerpc64-*-linux*)
  23376. + tm_file="${tm_file} dbxelf.h elfos.h svr4.h freebsd-spec.h rs6000/sysv4.h"
  23377. + test x$with_cpu != x || cpu_is_64bit=yes
  23378. + test x$cpu_is_64bit != xyes || tm_file="${tm_file} rs6000/default64.h"
  23379. + tm_file="rs6000/biarch64.h ${tm_file} rs6000/linux64.h"
  23380. + if test x${enable_secureplt} = xyes; then
  23381. + tm_file="rs6000/secureplt.h ${tm_file}"
  23382. + fi
  23383. + extra_options="${extra_options} rs6000/sysv4.opt rs6000/linux64.opt"
  23384. + tmake_file="t-dfprules rs6000/t-fprules ${tmake_file} rs6000/t-ppccomm rs6000/t-linux64 rs6000/t-fprules-softfp soft-fp/t-softfp"
  23385. + ;;
  23386. +powerpc64-*-gnu*)
  23387. + tm_file="${cpu_type}/${cpu_type}.h elfos.h svr4.h freebsd-spec.h gnu.h rs6000/sysv4.h rs6000/linux64.h rs6000/gnu.h"
  23388. + extra_options="${extra_options} rs6000/sysv4.opt rs6000/linux64.opt"
  23389. + tmake_file="rs6000/t-fprules t-slibgcc-elf-ver t-gnu rs6000/t-linux64 rs6000/t-fprules-softfp soft-fp/t-softfp"
  23390. + ;;
  23391. +powerpc-*-darwin*)
  23392. + extra_options="${extra_options} rs6000/darwin.opt"
  23393. + extra_parts="crt2.o"
  23394. + case ${target} in
  23395. + *-darwin1[0-9]* | *-darwin[8-9]*)
  23396. + tmake_file="${tmake_file} rs6000/t-darwin8"
  23397. + tm_file="${tm_file} rs6000/darwin8.h"
  23398. + ;;
  23399. + *-darwin7*)
  23400. + tm_file="${tm_file} rs6000/darwin7.h"
  23401. + ;;
  23402. + *-darwin[0-6]*)
  23403. + ;;
  23404. + esac
  23405. + extra_headers=altivec.h
  23406. + ;;
  23407. +powerpc64-*-darwin*)
  23408. + tm_file="${tm_file} ${cpu_type}/darwin8.h ${cpu_type}/darwin64.h"
  23409. + extra_options="${extra_options} ${cpu_type}/darwin.opt"
  23410. + # We're omitting t-darwin8 to avoid building any multilibs
  23411. + extra_headers=altivec.h
  23412. + ;;
  23413. +powerpc*-*-freebsd*)
  23414. + tm_file="${tm_file} dbxelf.h elfos.h ${fbsd_tm_file} rs6000/sysv4.h rs6000/freebsd.h"
  23415. + tmake_file="rs6000/t-fprules rs6000/t-fprules-fpbit rs6000/t-ppcos ${tmake_file} rs6000/t-ppccomm"
  23416. + extra_options="${extra_options} rs6000/sysv4.opt"
  23417. + ;;
  23418. +powerpc-*-netbsd*)
  23419. + tm_file="${tm_file} dbxelf.h elfos.h netbsd.h netbsd-elf.h freebsd-spec.h rs6000/sysv4.h rs6000/netbsd.h"
  23420. + tmake_file="${tmake_file} rs6000/t-netbsd"
  23421. + extra_options="${extra_options} rs6000/sysv4.opt"
  23422. + ;;
  23423. +powerpc-*-eabispe*)
  23424. + tm_file="${tm_file} dbxelf.h elfos.h svr4.h freebsd-spec.h rs6000/sysv4.h rs6000/eabi.h rs6000/e500.h rs6000/eabispe.h"
  23425. + extra_options="${extra_options} rs6000/sysv4.opt"
  23426. + tmake_file="rs6000/t-spe rs6000/t-ppccomm"
  23427. + ;;
  23428. +powerpc-*-eabisimaltivec*)
  23429. + tm_file="${tm_file} dbxelf.h elfos.h svr4.h freebsd-spec.h rs6000/sysv4.h rs6000/eabi.h rs6000/e500.h rs6000/eabisim.h rs6000/eabialtivec.h"
  23430. + extra_options="${extra_options} rs6000/sysv4.opt"
  23431. + tmake_file="rs6000/t-fprules rs6000/t-fprules-fpbit rs6000/t-ppcendian rs6000/t-ppccomm"
  23432. + ;;
  23433. +powerpc-*-eabisim*)
  23434. + tm_file="${tm_file} dbxelf.h elfos.h usegas.h svr4.h freebsd-spec.h rs6000/sysv4.h rs6000/eabi.h rs6000/e500.h rs6000/eabisim.h"
  23435. + extra_options="${extra_options} rs6000/sysv4.opt"
  23436. + tmake_file="rs6000/t-fprules rs6000/t-fprules-fpbit rs6000/t-ppcgas rs6000/t-ppccomm"
  23437. + ;;
  23438. +powerpc-*-elf*)
  23439. + tm_file="${tm_file} dbxelf.h elfos.h usegas.h svr4.h freebsd-spec.h rs6000/sysv4.h"
  23440. + extra_options="${extra_options} rs6000/sysv4.opt"
  23441. + tmake_file="rs6000/t-fprules rs6000/t-fprules-fpbit rs6000/t-ppcgas rs6000/t-ppccomm"
  23442. + ;;
  23443. +powerpc-*-eabialtivec*)
  23444. + tm_file="${tm_file} dbxelf.h elfos.h svr4.h freebsd-spec.h rs6000/sysv4.h rs6000/eabi.h rs6000/e500.h rs6000/eabialtivec.h"
  23445. + extra_options="${extra_options} rs6000/sysv4.opt"
  23446. + tmake_file="rs6000/t-fprules rs6000/t-fprules-fpbit rs6000/t-ppcendian rs6000/t-ppccomm"
  23447. + ;;
  23448. +powerpc-xilinx-eabi*)
  23449. + tm_file="${tm_file} dbxelf.h elfos.h usegas.h svr4.h freebsd-spec.h rs6000/sysv4.h rs6000/eabi.h rs6000/singlefp.h"
  23450. + extra_options="${extra_options} rs6000/sysv4.opt"
  23451. + tmake_file="rs6000/t-fprules rs6000/t-fprules-fpbit rs6000/t-ppcgas rs6000/t-ppccomm"
  23452. + ;;
  23453. +powerpc-*-eabi*)
  23454. + tm_file="${tm_file} dbxelf.h elfos.h usegas.h svr4.h freebsd-spec.h rs6000/sysv4.h rs6000/eabi.h rs6000/e500.h"
  23455. + extra_options="${extra_options} rs6000/sysv4.opt"
  23456. + tmake_file="rs6000/t-fprules rs6000/t-fprules-fpbit rs6000/t-ppcgas rs6000/t-ppccomm"
  23457. + ;;
  23458. +powerpc-*-rtems*)
  23459. + tm_file="${tm_file} dbxelf.h elfos.h svr4.h freebsd-spec.h rs6000/sysv4.h rs6000/eabi.h rs6000/e500.h rs6000/rtems.h rtems.h"
  23460. + extra_options="${extra_options} rs6000/sysv4.opt"
  23461. + tmake_file="rs6000/t-fprules rs6000/t-fprules-fpbit rs6000/t-rtems t-rtems rs6000/t-ppccomm"
  23462. + ;;
  23463. +powerpc-*-linux*altivec*)
  23464. + tm_file="${tm_file} dbxelf.h elfos.h svr4.h freebsd-spec.h rs6000/sysv4.h rs6000/linux.h rs6000/linuxaltivec.h"
  23465. + extra_options="${extra_options} rs6000/sysv4.opt"
  23466. + tmake_file="rs6000/t-fprules rs6000/t-fprules-softfp soft-fp/t-softfp rs6000/t-ppcos ${tmake_file} rs6000/t-ppccomm"
  23467. + ;;
  23468. +powerpc-*-linux*spe*)
  23469. + tm_file="${tm_file} dbxelf.h elfos.h svr4.h freebsd-spec.h rs6000/sysv4.h rs6000/linux.h rs6000/linuxspe.h rs6000/e500.h"
  23470. + extra_options="${extra_options} rs6000/sysv4.opt"
  23471. + tmake_file="t-dfprules rs6000/t-fprules rs6000/t-fprules-softfp soft-fp/t-softfp rs6000/t-ppcos ${tmake_file} rs6000/t-ppccomm"
  23472. + ;;
  23473. +powerpc-*-linux*paired*)
  23474. + tm_file="${tm_file} dbxelf.h elfos.h svr4.h freebsd-spec.h rs6000/sysv4.h rs6000/linux.h rs6000/750cl.h"
  23475. + extra_options="${extra_options} rs6000/sysv4.opt"
  23476. + tmake_file="rs6000/t-fprules rs6000/t-fprules-softfp soft-fp/t-softfp rs6000/t-ppcos ${tmake_file} rs6000/t-ppccomm"
  23477. + ;;
  23478. +powerpc-*-linux*)
  23479. + tm_file="${tm_file} dbxelf.h elfos.h svr4.h freebsd-spec.h rs6000/sysv4.h"
  23480. + extra_options="${extra_options} rs6000/sysv4.opt"
  23481. + tmake_file="t-dfprules rs6000/t-fprules rs6000/t-ppcos ${tmake_file} rs6000/t-ppccomm"
  23482. + case ${enable_targets}:${cpu_is_64bit} in
  23483. + *powerpc64* | all:* | *:yes)
  23484. + if test x$cpu_is_64bit = xyes; then
  23485. + tm_file="${tm_file} rs6000/default64.h"
  23486. + fi
  23487. + tm_file="rs6000/biarch64.h ${tm_file} rs6000/linux64.h"
  23488. + tmake_file="$tmake_file rs6000/t-linux64"
  23489. + extra_options="${extra_options} rs6000/linux64.opt"
  23490. + ;;
  23491. + *)
  23492. + tm_file="${tm_file} rs6000/linux.h"
  23493. + ;;
  23494. + esac
  23495. + tmake_file="${tmake_file} rs6000/t-fprules-softfp soft-fp/t-softfp"
  23496. + if test x${enable_secureplt} = xyes; then
  23497. + tm_file="rs6000/secureplt.h ${tm_file}"
  23498. + fi
  23499. + ;;
  23500. +powerpc-*-gnu-gnualtivec*)
  23501. + tm_file="${cpu_type}/${cpu_type}.h elfos.h svr4.h freebsd-spec.h gnu.h rs6000/sysv4.h rs6000/linux.h rs6000/linuxaltivec.h rs6000/gnu.h"
  23502. + extra_options="${extra_options} rs6000/sysv4.opt"
  23503. + tmake_file="rs6000/t-fprules rs6000/t-fprules-fpbit rs6000/t-ppcos t-slibgcc-elf-ver t-gnu rs6000/t-ppccomm"
  23504. + if test x$enable_threads = xyes; then
  23505. + thread_file='posix'
  23506. + fi
  23507. + ;;
  23508. +powerpc-*-gnu*)
  23509. + tm_file="${cpu_type}/${cpu_type}.h elfos.h svr4.h freebsd-spec.h gnu.h rs6000/sysv4.h rs6000/linux.h rs6000/gnu.h"
  23510. + tmake_file="rs6000/t-fprules rs6000/t-fprules-fpbit rs6000/t-ppcos t-slibgcc-elf-ver t-gnu rs6000/t-ppccomm"
  23511. + extra_options="${extra_options} rs6000/sysv4.opt"
  23512. + if test x$enable_threads = xyes; then
  23513. + thread_file='posix'
  23514. + fi
  23515. + ;;
  23516. +powerpc-wrs-vxworks|powerpc-wrs-vxworksae)
  23517. + tm_file="${tm_file} elfos.h svr4.h freebsd-spec.h rs6000/sysv4.h"
  23518. + tmake_file="${tmake_file} rs6000/t-fprules rs6000/t-fprules-fpbit rs6000/t-ppccomm rs6000/t-vxworks"
  23519. + extra_options="${extra_options} rs6000/sysv4.opt"
  23520. + extra_headers=ppc-asm.h
  23521. + case ${target} in
  23522. + *-vxworksae*)
  23523. + tm_file="${tm_file} vx-common.h vxworksae.h rs6000/vxworks.h rs6000/e500.h rs6000/vxworksae.h"
  23524. + tmake_file="${tmake_file} rs6000/t-vxworksae"
  23525. + ;;
  23526. + *-vxworks*)
  23527. + tm_file="${tm_file} vx-common.h vxworks.h rs6000/vxworks.h rs6000/e500.h"
  23528. + ;;
  23529. + esac
  23530. + ;;
  23531. +powerpc-*-lynxos*)
  23532. + xm_defines=POSIX
  23533. + tm_file="${tm_file} dbxelf.h elfos.h rs6000/sysv4.h rs6000/lynx.h lynx.h"
  23534. + tmake_file="t-lynx rs6000/t-lynx"
  23535. + extra_options="${extra_options} rs6000/sysv4.opt lynx.opt"
  23536. + extra_parts="crtbegin.o crtbeginS.o crtend.o crtendS.o"
  23537. + extra_options="${extra_options} lynx.opt"
  23538. + thread_file=lynx
  23539. + gnu_ld=yes
  23540. + gas=yes
  23541. + ;;
  23542. +powerpcle-*-elf*)
  23543. + tm_file="${tm_file} dbxelf.h elfos.h usegas.h svr4.h freebsd-spec.h rs6000/sysv4.h rs6000/sysv4le.h"
  23544. + tmake_file="rs6000/t-fprules rs6000/t-fprules-fpbit rs6000/t-ppcgas rs6000/t-ppccomm"
  23545. + extra_options="${extra_options} rs6000/sysv4.opt"
  23546. + ;;
  23547. +powerpcle-*-eabisim*)
  23548. + tm_file="${tm_file} dbxelf.h elfos.h usegas.h svr4.h freebsd-spec.h rs6000/sysv4.h rs6000/sysv4le.h rs6000/eabi.h rs6000/e500.h rs6000/eabisim.h"
  23549. + tmake_file="rs6000/t-fprules rs6000/t-fprules-fpbit rs6000/t-ppcgas rs6000/t-ppccomm"
  23550. + extra_options="${extra_options} rs6000/sysv4.opt"
  23551. + ;;
  23552. +powerpcle-*-eabi*)
  23553. + tm_file="${tm_file} dbxelf.h elfos.h usegas.h svr4.h freebsd-spec.h rs6000/sysv4.h rs6000/sysv4le.h rs6000/eabi.h rs6000/e500.h"
  23554. + tmake_file="rs6000/t-fprules rs6000/t-fprules-fpbit rs6000/t-ppcgas rs6000/t-ppccomm"
  23555. + extra_options="${extra_options} rs6000/sysv4.opt"
  23556. + ;;
  23557. +powerpc-xilinx-eabi*)
  23558. + tm_file="${tm_file} dbxelf.h elfos.h usegas.h svr4.h freebsd-spec.h rs6000/sysv4.h rs6000/eabi.h rs6000/singlefp.h rs6000/xfpu.h"
  23559. + extra_options="${extra_options} rs6000/sysv4.opt"
  23560. + tmake_file="rs6000/t-fprules rs6000/t-fprules-fpbit rs6000/t-ppcgas rs6000/t-ppccomm"
  23561. + ;;
  23562. +rs6000-ibm-aix4.[12]* | powerpc-ibm-aix4.[12]*)
  23563. + tm_file="${tm_file} rs6000/aix.h rs6000/aix41.h rs6000/xcoff.h"
  23564. + tmake_file="rs6000/t-fprules rs6000/t-fprules-fpbit rs6000/t-newas"
  23565. + extra_options="${extra_options} rs6000/aix41.opt"
  23566. + use_collect2=yes
  23567. + extra_headers=
  23568. + use_fixproto=yes
  23569. + ;;
  23570. +rs6000-ibm-aix4.[3456789]* | powerpc-ibm-aix4.[3456789]*)
  23571. + tm_file="rs6000/biarch64.h ${tm_file} rs6000/aix.h rs6000/aix43.h rs6000/xcoff.h"
  23572. + tmake_file=rs6000/t-aix43
  23573. + extra_options="${extra_options} rs6000/aix64.opt"
  23574. + use_collect2=yes
  23575. + thread_file='aix'
  23576. + extra_headers=
  23577. + ;;
  23578. +rs6000-ibm-aix5.1.* | powerpc-ibm-aix5.1.*)
  23579. + tm_file="rs6000/biarch64.h ${tm_file} rs6000/aix.h rs6000/aix51.h rs6000/xcoff.h"
  23580. + extra_options="${extra_options} rs6000/aix64.opt"
  23581. + tmake_file=rs6000/t-aix43
  23582. + use_collect2=yes
  23583. + thread_file='aix'
  23584. + extra_headers=
  23585. + ;;
  23586. +rs6000-ibm-aix5.2.* | powerpc-ibm-aix5.2.*)
  23587. + tm_file="${tm_file} rs6000/aix.h rs6000/aix52.h rs6000/xcoff.h"
  23588. + tmake_file=rs6000/t-aix52
  23589. + extra_options="${extra_options} rs6000/aix64.opt"
  23590. + use_collect2=yes
  23591. + thread_file='aix'
  23592. + extra_headers=
  23593. + ;;
  23594. +rs6000-ibm-aix5.3.* | powerpc-ibm-aix5.3.*)
  23595. + tm_file="${tm_file} rs6000/aix.h rs6000/aix53.h rs6000/xcoff.h"
  23596. + tmake_file=rs6000/t-aix52
  23597. + extra_options="${extra_options} rs6000/aix64.opt"
  23598. + use_collect2=yes
  23599. + thread_file='aix'
  23600. + extra_headers=altivec.h
  23601. + ;;
  23602. +rs6000-ibm-aix[6789].* | powerpc-ibm-aix[6789].*)
  23603. + tm_file="${tm_file} rs6000/aix.h rs6000/aix61.h rs6000/xcoff.h"
  23604. + tmake_file=rs6000/t-aix52
  23605. + extra_options="${extra_options} rs6000/aix64.opt"
  23606. + use_collect2=yes
  23607. + thread_file='aix'
  23608. + extra_headers=altivec.h
  23609. + ;;
  23610. +s390-*-linux*)
  23611. + tm_file="s390/s390.h dbxelf.h elfos.h svr4.h linux.h s390/linux.h"
  23612. + tmake_file="${tmake_file} t-dfprules s390/t-crtstuff s390/t-linux"
  23613. + ;;
  23614. +s390x-*-linux*)
  23615. + tm_file="s390/s390x.h s390/s390.h dbxelf.h elfos.h svr4.h linux.h s390/linux.h"
  23616. + tm_p_file=s390/s390-protos.h
  23617. + md_file=s390/s390.md
  23618. + extra_modes=s390/s390-modes.def
  23619. + out_file=s390/s390.c
  23620. + tmake_file="${tmake_file} t-dfprules s390/t-crtstuff s390/t-linux s390/t-linux64"
  23621. + ;;
  23622. +s390x-ibm-tpf*)
  23623. + tm_file="s390/s390x.h s390/s390.h dbxelf.h elfos.h svr4.h s390/tpf.h"
  23624. + tm_p_file=s390/s390-protos.h
  23625. + md_file=s390/s390.md
  23626. + extra_modes=s390/s390-modes.def
  23627. + out_file=s390/s390.c
  23628. + extra_parts="crtbeginS.o crtendS.o"
  23629. + tmake_file="s390/t-crtstuff s390/t-tpf"
  23630. + thread_file='tpf'
  23631. + extra_options="${extra_options} s390/tpf.opt"
  23632. + ;;
  23633. +score-*-elf)
  23634. + tm_file="dbxelf.h elfos.h score/elf.h score/score.h"
  23635. + tmake_file=score/t-score-elf
  23636. + extra_objs="score7.o score3.o"
  23637. + ;;
  23638. +sh-*-elf* | sh[12346l]*-*-elf* | \
  23639. +sh-*-symbianelf* | sh[12346l]*-*-symbianelf* | \
  23640. + sh-*-linux* | sh[2346lbe]*-*-linux* | \
  23641. + sh-*-netbsdelf* | shl*-*-netbsdelf* | sh5-*-netbsd* | sh5l*-*-netbsd* | \
  23642. + sh64-*-netbsd* | sh64l*-*-netbsd*)
  23643. + tmake_file="${tmake_file} sh/t-sh sh/t-elf"
  23644. + if test x${with_endian} = x; then
  23645. + case ${target} in
  23646. + sh[1234]*be-*-* | sh[1234]*eb-*-*) with_endian=big ;;
  23647. + shbe-*-* | sheb-*-*) with_endian=big,little ;;
  23648. + sh[1234]l* | sh[34]*-*-linux*) with_endian=little ;;
  23649. + shl* | sh64l* | sh*-*-linux* | \
  23650. + sh5l* | sh-superh-elf) with_endian=little,big ;;
  23651. + sh[1234]*-*-*) with_endian=big ;;
  23652. + *) with_endian=big,little ;;
  23653. + esac
  23654. + fi
  23655. + case ${with_endian} in
  23656. + big|little) tmake_file="${tmake_file} sh/t-1e" ;;
  23657. + big,little|little,big) ;;
  23658. + *) echo "with_endian=${with_endian} not supported."; exit 1 ;;
  23659. + esac
  23660. + case ${with_endian} in
  23661. + little*) tm_file="sh/little.h ${tm_file}" ;;
  23662. + esac
  23663. + tm_file="${tm_file} dbxelf.h elfos.h"
  23664. + case ${target} in
  23665. + sh*-*-netbsd*) ;;
  23666. + *) tm_file="${tm_file} svr4.h" ;;
  23667. + esac
  23668. + tm_file="${tm_file} sh/elf.h"
  23669. + case ${target} in
  23670. + sh*-*-linux*) tmake_file="${tmake_file} sh/t-linux"
  23671. + tm_file="${tm_file} linux.h sh/linux.h" ;;
  23672. + sh*-*-netbsd*) tm_file="${tm_file} netbsd.h netbsd-elf.h sh/netbsd-elf.h" ;;
  23673. + sh*-superh-elf) if test x$with_libgloss != xno; then
  23674. + with_libgloss=yes
  23675. + tm_file="${tm_file} sh/newlib.h"
  23676. + fi
  23677. + tm_file="${tm_file} sh/embed-elf.h sh/superh.h"
  23678. + tmake_file="${tmake_file} sh/t-superh"
  23679. + extra_options="${extra_options} sh/superh.opt" ;;
  23680. + *) if test x$with_newlib = xyes \
  23681. + && test x$with_libgloss = xyes; then
  23682. + tm_file="${tm_file} sh/newlib.h"
  23683. + fi
  23684. + tm_file="${tm_file} sh/embed-elf.h" ;;
  23685. + esac
  23686. + case ${target} in
  23687. + sh5*-*-netbsd*)
  23688. + # SHmedia, 32-bit ABI
  23689. + tmake_file="${tmake_file} sh/t-sh64 sh/t-netbsd"
  23690. + ;;
  23691. + sh64*-netbsd*)
  23692. + # SHmedia, 64-bit ABI
  23693. + tmake_file="${tmake_file} sh/t-sh64 sh/t-netbsd sh/t-netbsd-sh5-64"
  23694. + ;;
  23695. + *-*-netbsd)
  23696. + tmake_file="${tmake_file} sh/t-netbsd"
  23697. + ;;
  23698. + sh64*-*-linux*)
  23699. + tmake_file="${tmake_file} sh/t-sh64 sh/t-linux64"
  23700. + tm_file="${tm_file} sh/sh64.h"
  23701. + extra_headers="shmedia.h ushmedia.h sshmedia.h"
  23702. + ;;
  23703. + sh64*)
  23704. + tmake_file="${tmake_file} sh/t-sh64"
  23705. + tm_file="${tm_file} sh/sh64.h"
  23706. + extra_headers="shmedia.h ushmedia.h sshmedia.h"
  23707. + ;;
  23708. + *-*-symbianelf*)
  23709. + tmake_file="sh/t-symbian"
  23710. + tm_file="sh/symbian-pre.h sh/little.h ${tm_file} sh/symbian-post.h"
  23711. + extra_objs="symbian.o"
  23712. + extra_parts="crt1.o crti.o crtn.o crtbegin.o crtend.o crtbeginS.o crtendS.o"
  23713. + ;;
  23714. + esac
  23715. + # sed el/eb endian suffixes away to avoid confusion with sh[23]e
  23716. + case `echo ${target} | sed 's/e[lb]-/-/'` in
  23717. + sh64*-*-netbsd*) sh_cpu_target=sh5-64media ;;
  23718. + sh64* | sh5*-*-netbsd*) sh_cpu_target=sh5-32media ;;
  23719. + sh4a_single_only*) sh_cpu_target=sh4a-single-only ;;
  23720. + sh4a_single*) sh_cpu_target=sh4a-single ;;
  23721. + sh4a_nofpu*) sh_cpu_target=sh4a-nofpu ;;
  23722. + sh4al) sh_cpu_target=sh4al ;;
  23723. + sh4a*) sh_cpu_target=sh4a ;;
  23724. + sh4_single_only*) sh_cpu_target=sh4-single-only ;;
  23725. + sh4_single*) sh_cpu_target=sh4-single ;;
  23726. + sh4_nofpu*) sh_cpu_target=sh4-nofpu ;;
  23727. + sh4* | sh-superh-*) sh_cpu_target=sh4 ;;
  23728. + sh3e*) sh_cpu_target=sh3e ;;
  23729. + sh*-*-netbsd* | sh3*) sh_cpu_target=sh3 ;;
  23730. + sh2a_single_only*) sh_cpu_target=sh2a-single-only ;;
  23731. + sh2a_single*) sh_cpu_target=sh2a-single ;;
  23732. + sh2a_nofpu*) sh_cpu_target=sh2a-nofpu ;;
  23733. + sh2a*) sh_cpu_target=sh2a ;;
  23734. + sh2e*) sh_cpu_target=sh2e ;;
  23735. + sh2*) sh_cpu_target=sh2 ;;
  23736. + *) sh_cpu_target=sh1 ;;
  23737. + esac
  23738. + # did the user say --without-fp ?
  23739. + if test x$with_fp = xno; then
  23740. + case ${sh_cpu_target} in
  23741. + sh5-*media) sh_cpu_target=${sh_cpu_target}-nofpu ;;
  23742. + sh4al | sh1) ;;
  23743. + sh4a* ) sh_cpu_target=sh4a-nofpu ;;
  23744. + sh4*) sh_cpu_target=sh4-nofpu ;;
  23745. + sh3*) sh_cpu_target=sh3 ;;
  23746. + sh2a*) sh_cpu_target=sh2a-nofpu ;;
  23747. + sh2*) sh_cpu_target=sh2 ;;
  23748. + *) echo --without-fp not available for $target: ignored
  23749. + esac
  23750. + tm_defines="$tm_defines STRICT_NOFPU=1"
  23751. + fi
  23752. + sh_cpu_default="`echo $with_cpu|sed s/^m/sh/|tr A-Z_ a-z-`"
  23753. + case $sh_cpu_default in
  23754. + sh5-64media-nofpu | sh5-64media | \
  23755. + sh5-32media-nofpu | sh5-32media | sh5-compact-nofpu | sh5-compact | \
  23756. + sh2a-single-only | sh2a-single | sh2a-nofpu | sh2a | \
  23757. + sh4a-single-only | sh4a-single | sh4a-nofpu | sh4a | sh4al | \
  23758. + sh4-single-only | sh4-single | sh4-nofpu | sh4 | sh4-300 | \
  23759. + sh3e | sh3 | sh2e | sh2 | sh1) ;;
  23760. + "") sh_cpu_default=${sh_cpu_target} ;;
  23761. + *) echo "with_cpu=$with_cpu not supported"; exit 1 ;;
  23762. + esac
  23763. + sh_multilibs=${with_multilib_list}
  23764. + if test x${sh_multilibs} = x ; then
  23765. + case ${target} in
  23766. + sh64-superh-linux* | \
  23767. + sh[1234]*) sh_multilibs=${sh_cpu_target} ;;
  23768. + sh64* | sh5*) sh_multilibs=m5-32media,m5-32media-nofpu,m5-compact,m5-compact-nofpu,m5-64media,m5-64media-nofpu ;;
  23769. + sh-superh-*) sh_multilibs=m4,m4-single,m4-single-only,m4-nofpu ;;
  23770. + sh*-*-linux*) sh_multilibs=m1,m3e,m4 ;;
  23771. + sh*-*-netbsd*) sh_multilibs=m3,m3e,m4 ;;
  23772. + *) sh_multilibs=m1,m2,m2e,m4,m4-single,m4-single-only,m2a,m2a-single ;;
  23773. + esac
  23774. + if test x$with_fp = xno; then
  23775. + sh_multilibs="`echo $sh_multilibs|sed -e s/m4/sh4-nofpu/ -e s/,m4-[^,]*//g -e s/,m[23]e// -e s/m2a,m2a-single/m2a-nofpu/ -e s/m5-..m....,//g`"
  23776. + fi
  23777. + fi
  23778. + target_cpu_default=SELECT_`echo ${sh_cpu_default}|tr abcdefghijklmnopqrstuvwxyz- ABCDEFGHIJKLMNOPQRSTUVWXYZ_`
  23779. + tm_defines=${tm_defines}' SH_MULTILIB_CPU_DEFAULT=\"'`echo $sh_cpu_default|sed s/sh/m/`'\"'
  23780. + sh_multilibs=`echo $sh_multilibs,$sh_cpu_default | sed -e 's/[ ,/][ ,]*/ /g' -e 's/ $//' -e 's/^m/sh/' -e 's/ m/ sh/g' | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ_ abcdefghijklmnopqrstuvwxyz-`
  23781. + for sh_multilib in ${sh_multilibs}; do
  23782. + case ${sh_multilib} in
  23783. + sh1 | sh2 | sh2e | sh3 | sh3e | \
  23784. + sh4 | sh4-single | sh4-single-only | sh4-nofpu | sh4-300 |\
  23785. + sh4a | sh4a-single | sh4a-single-only | sh4a-nofpu | sh4al | \
  23786. + sh2a | sh2a-single | sh2a-single-only | sh2a-nofpu | \
  23787. + sh5-64media | sh5-64media-nofpu | \
  23788. + sh5-32media | sh5-32media-nofpu | \
  23789. + sh5-compact | sh5-compact-nofpu)
  23790. + tmake_file="${tmake_file} sh/t-mlib-${sh_multilib}"
  23791. + tm_defines="$tm_defines SUPPORT_`echo $sh_multilib|tr abcdefghijklmnopqrstuvwxyz- ABCDEFGHIJKLMNOPQRSTUVWXYZ_`=1"
  23792. + ;;
  23793. + *)
  23794. + echo "with_multilib_list=${sh_multilib} not supported."
  23795. + exit 1
  23796. + ;;
  23797. + esac
  23798. + done
  23799. + if test x${enable_incomplete_targets} = xyes ; then
  23800. + tm_defines="$tm_defines SUPPORT_SH1=1 SUPPORT_SH2E=1 SUPPORT_SH4=1 SUPPORT_SH4_SINGLE=1 SUPPORT_SH2A=1 SUPPORT_SH2A_SINGLE=1 SUPPORT_SH5_32MEDIA=1 SUPPORT_SH5_32MEDIA_NOFPU=1 SUPPORT_SH5_64MEDIA=1 SUPPORT_SH5_64MEDIA_NOFPU=1"
  23801. + fi
  23802. + ;;
  23803. +sh-*-rtems*)
  23804. + tmake_file="sh/t-sh sh/t-elf t-rtems sh/t-rtems"
  23805. + tm_file="${tm_file} dbxelf.h elfos.h svr4.h sh/elf.h sh/embed-elf.h sh/rtemself.h rtems.h"
  23806. + ;;
  23807. +sh-wrs-vxworks)
  23808. + tmake_file="$tmake_file sh/t-sh sh/t-elf sh/t-vxworks"
  23809. + tm_file="${tm_file} elfos.h svr4.h sh/elf.h sh/embed-elf.h vx-common.h vxworks.h sh/vxworks.h"
  23810. + ;;
  23811. +sh-*-*)
  23812. + tm_file="${tm_file} dbxcoff.h sh/coff.h"
  23813. + ;;
  23814. +sparc-*-netbsdelf*)
  23815. + tm_file="${tm_file} dbxelf.h elfos.h svr4.h sparc/sysv4.h netbsd.h netbsd-elf.h sparc/netbsd-elf.h"
  23816. + extra_options="${extra_options} sparc/long-double-switch.opt"
  23817. + ;;
  23818. +sparc64-*-openbsd*)
  23819. + tm_file="sparc/openbsd1-64.h ${tm_file} dbxelf.h elfos.h svr4.h sparc/sysv4.h sparc/sp64-elf.h openbsd.h sparc/openbsd64.h"
  23820. + extra_options="${extra_options} sparc/little-endian.opt"
  23821. + gas=yes gnu_ld=yes
  23822. + with_cpu=ultrasparc
  23823. + ;;
  23824. +sparc-*-elf*)
  23825. + tm_file="${tm_file} dbxelf.h elfos.h svr4.h sparc/sysv4.h sparc/sp-elf.h"
  23826. + tmake_file="sparc/t-elf sparc/t-crtfm"
  23827. + extra_parts="crti.o crtn.o crtbegin.o crtend.o"
  23828. + ;;
  23829. +sparc-*-linux*) # SPARC's running GNU/Linux, libc6
  23830. + tm_file="${tm_file} dbxelf.h elfos.h svr4.h sparc/sysv4.h sparc/gas.h linux.h"
  23831. + extra_options="${extra_options} sparc/long-double-switch.opt"
  23832. + tmake_file="${tmake_file} sparc/t-linux"
  23833. + if test x$enable_targets = xall; then
  23834. + tm_file="sparc/biarch64.h ${tm_file} sparc/linux64.h"
  23835. + tmake_file="${tmake_file} sparc/t-linux64"
  23836. + else
  23837. + tm_file="${tm_file} sparc/linux.h"
  23838. + fi
  23839. + tmake_file="${tmake_file} sparc/t-crtfm"
  23840. + ;;
  23841. +sparc-*-rtems*)
  23842. + tm_file="${tm_file} dbxelf.h elfos.h svr4.h sparc/sysv4.h sparc/sp-elf.h sparc/rtemself.h rtems.h"
  23843. + tmake_file="sparc/t-elf sparc/t-crtfm t-rtems"
  23844. + extra_parts="crti.o crtn.o crtbegin.o crtend.o"
  23845. + ;;
  23846. +sparc64-*-solaris2* | sparcv9-*-solaris2*)
  23847. + tm_file="sparc/biarch64.h ${tm_file} dbxelf.h elfos.h svr4.h sparc/sysv4.h sol2.h"
  23848. + case ${target} in
  23849. + *-*-solaris2.1[0-9]*)
  23850. + tm_file="${tm_file} sol2-10.h"
  23851. + ;;
  23852. + esac
  23853. + tm_file="${tm_file} sparc/sol2.h sparc/sol2-64.h sparc/sol2-bi.h"
  23854. + if test x$gnu_ld = xyes; then
  23855. + tm_file="${tm_file} sol2-gld.h sparc/sol2-gld-bi.h"
  23856. + fi
  23857. + if test x$gas = xyes; then
  23858. + tm_file="${tm_file} sparc/sol2-gas.h sparc/sol2-gas-bi.h"
  23859. + fi
  23860. + tm_file="${tm_file} tm-dwarf2.h"
  23861. + tmake_file="t-sol2 sparc/t-sol2 sparc/t-sol2-64 sparc/t-crtfm"
  23862. + if test x$gnu_ld = xyes; then
  23863. + tmake_file="$tmake_file t-slibgcc-elf-ver"
  23864. + else
  23865. + tmake_file="$tmake_file t-slibgcc-sld"
  23866. + fi
  23867. + if test x$gas = xyes; then
  23868. + tm_file="usegas.h ${tm_file}"
  23869. + fi
  23870. + c_target_objs="sol2-c.o"
  23871. + cxx_target_objs="sol2-c.o"
  23872. + extra_objs="sol2.o"
  23873. + tm_p_file="${tm_p_file} sol2-protos.h"
  23874. + extra_parts="crt1.o crti.o crtn.o gcrt1.o crtbegin.o crtend.o"
  23875. + case ${enable_threads}:${have_pthread_h}:${have_thread_h} in
  23876. + "":yes:* | yes:yes:* ) thread_file=posix ;;
  23877. + "":*:yes | yes:*:yes ) thread_file=solaris ;;
  23878. + esac
  23879. + ;;
  23880. +sparc-*-solaris2*)
  23881. + tm_file="${tm_file} dbxelf.h elfos.h svr4.h sparc/sysv4.h sol2.h"
  23882. + case ${target} in
  23883. + *-*-solaris2.1[0-9]*)
  23884. + tm_file="${tm_file} sol2-10.h"
  23885. + ;;
  23886. + esac
  23887. + tm_file="${tm_file} sparc/sol2.h"
  23888. + if test x$gnu_ld = xyes; then
  23889. + tm_file="${tm_file} sol2-gld.h"
  23890. + fi
  23891. + if test x$gas = xyes; then
  23892. + tm_file="${tm_file} sparc/sol2-gas.h"
  23893. + fi
  23894. + tmake_file="t-sol2 sparc/t-sol2 sparc/t-crtfm"
  23895. + if test x$gnu_ld = xyes; then
  23896. + tmake_file="$tmake_file t-slibgcc-elf-ver"
  23897. + else
  23898. + tmake_file="$tmake_file t-slibgcc-sld"
  23899. + fi
  23900. + tm_file="sparc/biarch64.h ${tm_file} sparc/sol2-bi.h"
  23901. + if test x$gnu_ld = xyes; then
  23902. + tm_file="${tm_file} sparc/sol2-gld-bi.h"
  23903. + fi
  23904. + if test x$gas = xyes; then
  23905. + tm_file="${tm_file} sparc/sol2-gas-bi.h"
  23906. + fi
  23907. + if test x$gas = xyes; then
  23908. + tm_file="usegas.h ${tm_file}"
  23909. + fi
  23910. + tm_file="${tm_file} tm-dwarf2.h"
  23911. + tmake_file="$tmake_file sparc/t-sol2-64"
  23912. + test x$with_cpu != x || with_cpu=v9
  23913. + c_target_objs="sol2-c.o"
  23914. + cxx_target_objs="sol2-c.o"
  23915. + extra_objs="sol2.o"
  23916. + tm_p_file="${tm_p_file} sol2-protos.h"
  23917. + extra_parts="crt1.o crti.o crtn.o gcrt1.o gmon.o crtbegin.o crtend.o"
  23918. + case ${enable_threads}:${have_pthread_h}:${have_thread_h} in
  23919. + "":yes:* | yes:yes:* )
  23920. + thread_file=posix
  23921. + ;;
  23922. + "":*:yes | yes:*:yes )
  23923. + thread_file=solaris
  23924. + ;;
  23925. + esac
  23926. + ;;
  23927. +sparc-wrs-vxworks)
  23928. + tm_file="${tm_file} elfos.h svr4.h sparc/sysv4.h vx-common.h vxworks.h sparc/vxworks.h"
  23929. + tmake_file="${tmake_file} sparc/t-vxworks"
  23930. + ;;
  23931. +sparc64-*-elf*)
  23932. + tm_file="${tm_file} dbxelf.h elfos.h svr4.h sparc/sysv4.h sparc/sp64-elf.h"
  23933. + extra_options="${extra_options} sparc/little-endian.opt"
  23934. + tmake_file="${tmake_file} sparc/t-crtfm"
  23935. + extra_parts="crtbegin.o crtend.o"
  23936. + ;;
  23937. +sparc64-*-freebsd*|ultrasparc-*-freebsd*)
  23938. + tm_file="${tm_file} ${fbsd_tm_file} dbxelf.h elfos.h sparc/sysv4.h sparc/freebsd.h"
  23939. + extra_options="${extra_options} sparc/long-double-switch.opt"
  23940. + tmake_file="${tmake_file} sparc/t-crtfm"
  23941. + case "x$with_cpu" in
  23942. + xultrasparc) ;;
  23943. + x) with_cpu=ultrasparc ;;
  23944. + *) echo "$with_cpu not supported for freebsd target"; exit 1 ;;
  23945. + esac
  23946. + ;;
  23947. +sparc64-*-linux*) # 64-bit SPARC's running GNU/Linux
  23948. + tm_file="sparc/biarch64.h ${tm_file} dbxelf.h elfos.h svr4.h sparc/sysv4.h sparc/gas.h linux.h sparc/linux64.h"
  23949. + extra_options="${extra_options} sparc/long-double-switch.opt"
  23950. + tmake_file="${tmake_file} sparc/t-linux sparc/t-linux64 sparc/t-crtfm"
  23951. + ;;
  23952. +sparc64-*-netbsd*)
  23953. + tm_file="sparc/biarch64.h ${tm_file}"
  23954. + tm_file="${tm_file} dbxelf.h elfos.h svr4.h sparc/sysv4.h netbsd.h netbsd-elf.h sparc/netbsd-elf.h"
  23955. + extra_options="${extra_options} sparc/long-double-switch.opt"
  23956. + tmake_file="${tmake_file} sparc/t-netbsd64"
  23957. + ;;
  23958. +spu-*-elf*)
  23959. + tm_file="dbxelf.h elfos.h spu/spu-elf.h spu/spu.h"
  23960. + tmake_file="spu/t-spu-elf"
  23961. + extra_headers="spu_intrinsics.h spu_internals.h vmx2spu.h spu_mfcio.h vec_types.h"
  23962. + extra_modes=spu/spu-modes.def
  23963. + c_target_objs="${c_target_objs} spu-c.o"
  23964. + cxx_target_objs="${cxx_target_objs} spu-c.o"
  23965. + ;;
  23966. +v850e1-*-*)
  23967. + target_cpu_default="TARGET_CPU_v850e1"
  23968. + tm_file="dbxelf.h elfos.h svr4.h v850/v850.h"
  23969. + tm_p_file=v850/v850-protos.h
  23970. + tmake_file=v850/t-v850e
  23971. + md_file=v850/v850.md
  23972. + out_file=v850/v850.c
  23973. + extra_options="${extra_options} v850/v850.opt"
  23974. + if test x$stabs = xyes
  23975. + then
  23976. + tm_file="${tm_file} dbx.h"
  23977. + fi
  23978. + use_collect2=no
  23979. + c_target_objs="v850-c.o"
  23980. + cxx_target_objs="v850-c.o"
  23981. + ;;
  23982. +v850e-*-*)
  23983. + target_cpu_default="TARGET_CPU_v850e"
  23984. + tm_file="dbxelf.h elfos.h svr4.h v850/v850.h"
  23985. + tm_p_file=v850/v850-protos.h
  23986. + tmake_file=v850/t-v850e
  23987. + md_file=v850/v850.md
  23988. + out_file=v850/v850.c
  23989. + extra_options="${extra_options} v850/v850.opt"
  23990. + if test x$stabs = xyes
  23991. + then
  23992. + tm_file="${tm_file} dbx.h"
  23993. + fi
  23994. + use_collect2=no
  23995. + c_target_objs="v850-c.o"
  23996. + cxx_target_objs="v850-c.o"
  23997. + ;;
  23998. +v850-*-*)
  23999. + target_cpu_default="TARGET_CPU_generic"
  24000. + tm_file="dbxelf.h elfos.h svr4.h ${tm_file}"
  24001. + tmake_file=v850/t-v850
  24002. + if test x$stabs = xyes
  24003. + then
  24004. + tm_file="${tm_file} dbx.h"
  24005. + fi
  24006. + use_collect2=no
  24007. + c_target_objs="v850-c.o"
  24008. + cxx_target_objs="v850-c.o"
  24009. + ;;
  24010. +vax-*-netbsdelf*)
  24011. + tm_file="${tm_file} elfos.h netbsd.h netbsd-elf.h vax/elf.h vax/netbsd-elf.h"
  24012. + ;;
  24013. +vax-*-netbsd*)
  24014. + tm_file="${tm_file} netbsd.h netbsd-aout.h vax/netbsd.h"
  24015. + tmake_file=t-netbsd
  24016. + extra_parts=""
  24017. + use_collect2=yes
  24018. + ;;
  24019. +vax-*-openbsd*)
  24020. + tm_file="vax/vax.h vax/openbsd1.h openbsd.h vax/openbsd.h"
  24021. + use_collect2=yes
  24022. + ;;
  24023. +xstormy16-*-elf)
  24024. + # For historical reasons, the target files omit the 'x'.
  24025. + tm_file="dbxelf.h elfos.h svr4.h stormy16/stormy16.h"
  24026. + tm_p_file=stormy16/stormy16-protos.h
  24027. + md_file=stormy16/stormy16.md
  24028. + out_file=stormy16/stormy16.c
  24029. + extra_options=stormy16/stormy16.opt
  24030. + tmake_file="stormy16/t-stormy16"
  24031. + extra_parts="crtbegin.o crtend.o"
  24032. + ;;
  24033. +xtensa*-*-elf*)
  24034. + tm_file="${tm_file} dbxelf.h elfos.h svr4.h xtensa/elf.h"
  24035. + tmake_file="xtensa/t-xtensa xtensa/t-elf"
  24036. + ;;
  24037. +xtensa*-*-linux*)
  24038. + tm_file="${tm_file} dbxelf.h elfos.h svr4.h linux.h xtensa/linux.h"
  24039. + tmake_file="${tmake_file} xtensa/t-xtensa xtensa/t-linux"
  24040. + ;;
  24041. +am33_2.0-*-linux*)
  24042. + tm_file="mn10300/mn10300.h dbxelf.h elfos.h linux.h mn10300/linux.h"
  24043. + tmake_file="${tmake_file} mn10300/t-linux"
  24044. + gas=yes gnu_ld=yes
  24045. + extra_parts="crtbegin.o crtend.o crtbeginS.o crtendS.o"
  24046. + use_collect2=no
  24047. + ;;
  24048. +m32c-*-rtems*)
  24049. + tm_file="dbxelf.h elfos.h svr4.h ${tm_file} m32c/rtems.h rtems.h"
  24050. + tmake_file="${tmake_file} t-rtems"
  24051. + c_target_objs="m32c-pragma.o"
  24052. + cxx_target_objs="m32c-pragma.o"
  24053. + ;;
  24054. +m32c-*-elf*)
  24055. + tm_file="dbxelf.h elfos.h svr4.h ${tm_file}"
  24056. + c_target_objs="m32c-pragma.o"
  24057. + cxx_target_objs="m32c-pragma.o"
  24058. + ;;
  24059. +*)
  24060. + echo "*** Configuration ${target} not supported" 1>&2
  24061. + exit 1
  24062. + ;;
  24063. +esac
  24064. +
  24065. +case ${target} in
  24066. +i[34567]86-*-linux* | x86_64-*-linux*)
  24067. + tmake_file="${tmake_file} i386/t-pmm_malloc i386/t-i386"
  24068. + ;;
  24069. +i[34567]86-*-* | x86_64-*-*)
  24070. + tmake_file="${tmake_file} i386/t-gmm_malloc i386/t-i386"
  24071. + ;;
  24072. +esac
  24073. +
  24074. +# Support for --with-cpu and related options (and a few unrelated options,
  24075. +# too).
  24076. +case ${with_cpu} in
  24077. + yes | no)
  24078. + echo "--with-cpu must be passed a value" 1>&2
  24079. + exit 1
  24080. + ;;
  24081. +esac
  24082. +
  24083. +# If there is no $with_cpu option, try to infer one from ${target}.
  24084. +# This block sets nothing except for with_cpu.
  24085. +if test x$with_cpu = x ; then
  24086. + case ${target} in
  24087. + i386-*-*)
  24088. + with_cpu=i386
  24089. + ;;
  24090. + i486-*-*)
  24091. + with_cpu=i486
  24092. + ;;
  24093. + i586-*-*)
  24094. + case ${target_noncanonical} in
  24095. + k6_2-*)
  24096. + with_cpu=k6-2
  24097. + ;;
  24098. + k6_3-*)
  24099. + with_cpu=k6-3
  24100. + ;;
  24101. + k6-*)
  24102. + with_cpu=k6
  24103. + ;;
  24104. + pentium_mmx-*|winchip_c6-*|winchip2-*|c3-*)
  24105. + with_cpu=pentium-mmx
  24106. + ;;
  24107. + *)
  24108. + with_cpu=pentium
  24109. + ;;
  24110. + esac
  24111. + ;;
  24112. + i686-*-* | i786-*-*)
  24113. + case ${target_noncanonical} in
  24114. + amdfam10-*|barcelona-*)
  24115. + with_cpu=amdfam10
  24116. + ;;
  24117. + k8_sse3-*|opteron_sse3-*|athlon64_sse3-*)
  24118. + with_cpu=k8-sse3
  24119. + ;;
  24120. + k8-*|opteron-*|athlon64-*|athlon_fx-*)
  24121. + with_cpu=k8
  24122. + ;;
  24123. + athlon_xp-*|athlon_mp-*|athlon_4-*)
  24124. + with_cpu=athlon-4
  24125. + ;;
  24126. + athlon_tbird-*|athlon-*)
  24127. + with_cpu=athlon
  24128. + ;;
  24129. + geode-*)
  24130. + with_cpu=geode
  24131. + ;;
  24132. + pentium2-*)
  24133. + with_cpu=pentium2
  24134. + ;;
  24135. + pentium3-*|pentium3m-*)
  24136. + with_cpu=pentium3
  24137. + ;;
  24138. + pentium4-*|pentium4m-*)
  24139. + with_cpu=pentium4
  24140. + ;;
  24141. + prescott-*)
  24142. + with_cpu=prescott
  24143. + ;;
  24144. + nocona-*)
  24145. + with_cpu=nocona
  24146. + ;;
  24147. + core2-*)
  24148. + with_cpu=core2
  24149. + ;;
  24150. + pentium_m-*)
  24151. + with_cpu=pentium-m
  24152. + ;;
  24153. + pentiumpro-*)
  24154. + with_cpu=pentiumpro
  24155. + ;;
  24156. + *)
  24157. + with_cpu=generic
  24158. + ;;
  24159. + esac
  24160. + ;;
  24161. + x86_64-*-*)
  24162. + case ${target_noncanonical} in
  24163. + amdfam10-*|barcelona-*)
  24164. + with_cpu=amdfam10
  24165. + ;;
  24166. + k8_sse3-*|opteron_sse3-*|athlon64_sse3-*)
  24167. + with_cpu=k8-sse3
  24168. + ;;
  24169. + k8-*|opteron-*|athlon64-*|athlon_fx-*)
  24170. + with_cpu=k8
  24171. + ;;
  24172. + nocona-*)
  24173. + with_cpu=nocona
  24174. + ;;
  24175. + core2-*)
  24176. + with_cpu=core2
  24177. + ;;
  24178. + *)
  24179. + with_cpu=generic
  24180. + ;;
  24181. + esac
  24182. + ;;
  24183. + alphaev6[78]*-*-*)
  24184. + with_cpu=ev67
  24185. + ;;
  24186. + alphaev6*-*-*)
  24187. + with_cpu=ev6
  24188. + ;;
  24189. + alphapca56*-*-*)
  24190. + with_cpu=pca56
  24191. + ;;
  24192. + alphaev56*-*-*)
  24193. + with_cpu=ev56
  24194. + ;;
  24195. + alphaev5*-*-*)
  24196. + with_cpu=ev5
  24197. + ;;
  24198. + frv-*-*linux* | frv400-*-*linux*)
  24199. + with_cpu=fr400
  24200. + ;;
  24201. + frv550-*-*linux*)
  24202. + with_cpu=fr550
  24203. + ;;
  24204. + m68k*-*-*)
  24205. + case "$with_arch" in
  24206. + "cf")
  24207. + with_cpu=${default_cf_cpu}
  24208. + ;;
  24209. + "" | "m68k")
  24210. + with_cpu=m${default_m68k_cpu}
  24211. + ;;
  24212. + esac
  24213. + ;;
  24214. + mips*-*-vxworks)
  24215. + with_arch=mips2
  24216. + ;;
  24217. + sparc*-*-*)
  24218. + with_cpu="`echo ${target} | sed 's/-.*$//'`"
  24219. + ;;
  24220. + esac
  24221. +
  24222. + # Avoid overriding --with-cpu-32 and --with-cpu-64 values.
  24223. + case ${target} in
  24224. + i[34567]86-*-*|x86_64-*-*)
  24225. + if test x$with_cpu != x; then
  24226. + if test x$with_cpu_32 != x || test x$with_cpu_64 != x; then
  24227. + if test x$with_cpu_32 = x; then
  24228. + with_cpu_32=$with_cpu
  24229. + fi
  24230. + if test x$with_cpu_64 = x; then
  24231. + with_cpu_64=$with_cpu
  24232. + fi
  24233. + with_cpu=
  24234. + fi
  24235. + fi
  24236. + ;;
  24237. + esac
  24238. +fi
  24239. +
  24240. +# Similarly for --with-schedule.
  24241. +if test x$with_schedule = x; then
  24242. + case ${target} in
  24243. + hppa1*)
  24244. + # Override default PA8000 scheduling model.
  24245. + with_schedule=7100LC
  24246. + ;;
  24247. + esac
  24248. +fi
  24249. +
  24250. +# Validate and mark as valid any --with options supported
  24251. +# by this target. In order to use a particular --with option
  24252. +# you must list it in supported_defaults; validating the value
  24253. +# is optional. This case statement should set nothing besides
  24254. +# supported_defaults.
  24255. +
  24256. +supported_defaults=
  24257. +case "${target}" in
  24258. + alpha*-*-*)
  24259. + supported_defaults="cpu tune"
  24260. + for which in cpu tune; do
  24261. + eval "val=\$with_$which"
  24262. + case "$val" in
  24263. + "" \
  24264. + | ev4 | ev45 | 21064 | ev5 | 21164 | ev56 | 21164a \
  24265. + | pca56 | 21164PC | 21164pc | ev6 | 21264 | ev67 \
  24266. + | 21264a)
  24267. + ;;
  24268. + *)
  24269. + echo "Unknown CPU used in --with-$which=$val" 1>&2
  24270. + exit 1
  24271. + ;;
  24272. + esac
  24273. + done
  24274. + ;;
  24275. +
  24276. + arm*-*-*)
  24277. + supported_defaults="arch cpu float tune fpu abi mode"
  24278. + for which in cpu tune; do
  24279. + # See if it matches any of the entries in arm-cores.def
  24280. + eval "val=\$with_$which"
  24281. + if [ x"$val" = x ] \
  24282. + || grep "^ARM_CORE(\"$val\"," \
  24283. + ${srcdir}/config/arm/arm-cores.def \
  24284. + > /dev/null; then
  24285. + # Ok
  24286. + new_val=`grep "^ARM_CORE(\"$val\"," \
  24287. + ${srcdir}/config/arm/arm-cores.def | \
  24288. + sed -e 's/^[^,]*,[ ]*//' | \
  24289. + sed -e 's/,.*$//'`
  24290. + eval "target_${which}_cname=$new_val"
  24291. + echo "For $val real value is $new_val"
  24292. + true
  24293. + else
  24294. + echo "Unknown CPU used in --with-$which=$val" 1>&2
  24295. + exit 1
  24296. + fi
  24297. + done
  24298. +
  24299. + case "$with_arch" in
  24300. + "" \
  24301. + | armv[23456] | armv2a | armv3m | armv4t | armv5t \
  24302. + | armv5te | armv6j |armv6k | armv6z | armv6zk | armv6-m \
  24303. + | armv7 | armv7-a | armv7-r | armv7-m \
  24304. + | iwmmxt | ep9312)
  24305. + # OK
  24306. + ;;
  24307. + *)
  24308. + echo "Unknown arch used in --with-arch=$with_arch" 1>&2
  24309. + exit 1
  24310. + ;;
  24311. + esac
  24312. +
  24313. + case "$with_float" in
  24314. + "" \
  24315. + | soft | hard | softfp)
  24316. + # OK
  24317. + ;;
  24318. + *)
  24319. + echo "Unknown floating point type used in --with-float=$with_float" 1>&2
  24320. + exit 1
  24321. + ;;
  24322. + esac
  24323. +
  24324. + case "$with_fpu" in
  24325. + "" \
  24326. + | fpa | fpe2 | fpe3 | maverick | vfp | vfp3 | vfpv3 | vfpv3-d16 | neon )
  24327. + # OK
  24328. + ;;
  24329. + *)
  24330. + echo "Unknown fpu used in --with-fpu=$with_fpu" 2>&1
  24331. + exit 1
  24332. + ;;
  24333. + esac
  24334. +
  24335. + case "$with_abi" in
  24336. + "" \
  24337. + | apcs-gnu | atpcs | aapcs | iwmmxt | aapcs-linux )
  24338. + #OK
  24339. + ;;
  24340. + *)
  24341. + echo "Unknown ABI used in --with-abi=$with_abi"
  24342. + exit 1
  24343. + ;;
  24344. + esac
  24345. +
  24346. + case "$with_mode" in
  24347. + "" \
  24348. + | arm | thumb )
  24349. + #OK
  24350. + ;;
  24351. + *)
  24352. + echo "Unknown mode used in --with-mode=$with_mode"
  24353. + exit 1
  24354. + ;;
  24355. + esac
  24356. +
  24357. + if test "x$with_arch" != x && test "x$with_cpu" != x; then
  24358. + echo "Warning: --with-arch overrides --with-cpu=$with_cpu" 1>&2
  24359. + fi
  24360. + ;;
  24361. +
  24362. + fr*-*-*linux*)
  24363. + supported_defaults=cpu
  24364. + case "$with_cpu" in
  24365. + fr400) ;;
  24366. + fr550) ;;
  24367. + *)
  24368. + echo "Unknown cpu used in --with-cpu=$with_cpu" 1>&2
  24369. + exit 1
  24370. + ;;
  24371. + esac
  24372. + ;;
  24373. +
  24374. + fido-*-* | m68k*-*-*)
  24375. + supported_defaults="arch cpu"
  24376. + case "$with_arch" in
  24377. + "" | "m68k"| "cf")
  24378. + m68k_arch_family="$with_arch"
  24379. + ;;
  24380. + *)
  24381. + echo "Invalid --with-arch=$with_arch" 1>&2
  24382. + exit 1
  24383. + ;;
  24384. + esac
  24385. +
  24386. + # We always have a $with_cpu setting here.
  24387. + case "$with_cpu" in
  24388. + "m68000" | "m68010" | "m68020" | "m68030" | "m68040" | "m68060")
  24389. + m68k_cpu_ident=$with_cpu
  24390. + ;;
  24391. + "m68020-40")
  24392. + m68k_cpu_ident=m68020
  24393. + tm_defines="$tm_defines M68K_DEFAULT_TUNE=u68020_40"
  24394. + ;;
  24395. + "m68020-60")
  24396. + m68k_cpu_ident=m68020
  24397. + tm_defines="$tm_defines M68K_DEFAULT_TUNE=u68020_60"
  24398. + ;;
  24399. + *)
  24400. + # We need the C identifier rather than the string.
  24401. + m68k_cpu_ident=`awk -v arg="\"$with_cpu\"" \
  24402. + 'BEGIN { FS="[ \t]*[,()][ \t]*" }; \
  24403. + $1 == "M68K_DEVICE" && $2 == arg { print $3 }' \
  24404. + ${srcdir}/config/m68k/m68k-devices.def`
  24405. + if [ x"$m68k_cpu_ident" = x ] ; then
  24406. + echo "Unknown CPU used in --with-cpu=$with_cpu" 1>&2
  24407. + exit 1
  24408. + fi
  24409. + with_cpu="mcpu=$with_cpu"
  24410. + ;;
  24411. + esac
  24412. + ;;
  24413. +
  24414. + hppa*-*-*)
  24415. + supported_defaults="arch schedule"
  24416. +
  24417. + case "$with_arch" in
  24418. + "" | 1.0 | 1.1 | 2.0)
  24419. + # OK
  24420. + ;;
  24421. + *)
  24422. + echo "Unknown architecture used in --with-arch=$with_arch" 1>&2
  24423. + exit 1
  24424. + ;;
  24425. + esac
  24426. +
  24427. + case "$with_schedule" in
  24428. + "" | 700 | 7100 | 7100LC | 7200 | 7300 | 8000)
  24429. + # OK
  24430. + ;;
  24431. + *)
  24432. + echo "Unknown processor used in --with-schedule=$with_schedule." 1>&2
  24433. + exit 1
  24434. + ;;
  24435. + esac
  24436. + ;;
  24437. +
  24438. + i[34567]86-*-* | x86_64-*-*)
  24439. + supported_defaults="arch arch_32 arch_64 cpu cpu_32 cpu_64 tune tune_32 tune_64"
  24440. + for which in arch arch_32 arch_64 cpu cpu_32 cpu_64 tune tune_32 tune_64; do
  24441. + eval "val=\$with_$which"
  24442. + case ${val} in
  24443. + i386 | i486 \
  24444. + | i586 | pentium | pentium-mmx | winchip-c6 | winchip2 \
  24445. + | c3 | c3-2 | i686 | pentiumpro | pentium2 | pentium3 \
  24446. + | pentium4 | k6 | k6-2 | k6-3 | athlon | athlon-tbird \
  24447. + | athlon-4 | athlon-xp | athlon-mp | geode \
  24448. + | prescott | pentium-m | pentium4m | pentium3m)
  24449. + case "${target}" in
  24450. + x86_64-*-*)
  24451. + case "x$which" in
  24452. + *_32)
  24453. + ;;
  24454. + *)
  24455. + echo "CPU given in --with-$which=$val doesn't support 64bit mode." 1>&2
  24456. + exit 1
  24457. + ;;
  24458. + esac
  24459. + ;;
  24460. + esac
  24461. + # OK
  24462. + ;;
  24463. + "" | amdfam10 | barcelona | k8-sse3 | opteron-sse3 | athlon64-sse3 | k8 | opteron | athlon64 | athlon-fx | nocona | core2 | generic)
  24464. + # OK
  24465. + ;;
  24466. + *)
  24467. + echo "Unknown CPU given in --with-$which=$val." 1>&2
  24468. + exit 1
  24469. + ;;
  24470. + esac
  24471. + done
  24472. + ;;
  24473. +
  24474. + mips*-*-*)
  24475. + supported_defaults="abi arch float tune divide llsc mips-plt"
  24476. +
  24477. + case ${with_float} in
  24478. + "" | soft | hard)
  24479. + # OK
  24480. + ;;
  24481. + *)
  24482. + echo "Unknown floating point type used in --with-float=$with_float" 1>&2
  24483. + exit 1
  24484. + ;;
  24485. + esac
  24486. +
  24487. + case ${with_abi} in
  24488. + "" | 32 | o64 | n32 | 64 | eabi)
  24489. + # OK
  24490. + ;;
  24491. + *)
  24492. + echo "Unknown ABI used in --with-abi=$with_abi" 1>&2
  24493. + exit 1
  24494. + ;;
  24495. + esac
  24496. +
  24497. + case ${with_divide} in
  24498. + "" | breaks | traps)
  24499. + # OK
  24500. + ;;
  24501. + *)
  24502. + echo "Unknown division check type use in --with-divide=$with_divide" 1>&2
  24503. + exit 1
  24504. + ;;
  24505. + esac
  24506. +
  24507. + case ${with_llsc} in
  24508. + yes)
  24509. + with_llsc=llsc
  24510. + ;;
  24511. + no)
  24512. + with_llsc="no-llsc"
  24513. + ;;
  24514. + "")
  24515. + # OK
  24516. + ;;
  24517. + *)
  24518. + echo "Unknown llsc type used in --with-llsc" 1>&2
  24519. + exit 1
  24520. + ;;
  24521. + esac
  24522. +
  24523. + case ${with_mips_plt} in
  24524. + yes)
  24525. + with_mips_plt=plt
  24526. + ;;
  24527. + no)
  24528. + with_mips_plt=no-plt
  24529. + ;;
  24530. + "")
  24531. + ;;
  24532. + *)
  24533. + echo "Unknown --with-mips-plt argument: $with_mips_plt" 1>&2
  24534. + exit 1
  24535. + ;;
  24536. + esac
  24537. + ;;
  24538. +
  24539. + powerpc*-*-* | rs6000-*-*)
  24540. + supported_defaults="cpu float tune"
  24541. +
  24542. + for which in cpu tune; do
  24543. + eval "val=\$with_$which"
  24544. + case ${val} in
  24545. + default32 | default64)
  24546. + with_which="with_$which"
  24547. + eval $with_which=
  24548. + ;;
  24549. + 405cr)
  24550. + tm_defines="${tm_defines} CONFIG_PPC405CR"
  24551. + eval "with_$which=405"
  24552. + ;;
  24553. + "" | common \
  24554. + | power | power[234567] | power6x | powerpc | powerpc64 \
  24555. + | rios | rios1 | rios2 | rsc | rsc1 | rs64a \
  24556. + | 401 | 403 | 405 | 405fp | 440 | 440fp | 464 | 464fp \
  24557. + | 505 | 601 | 602 | 603 | 603e | ec603e | 604 \
  24558. + | 604e | 620 | 630 | 740 | 750 | 7400 | 7450 \
  24559. + | e300c[23] | 854[08] | e500mc \
  24560. + | 801 | 821 | 823 | 860 | 970 | G3 | G4 | G5 | cell)
  24561. + # OK
  24562. + ;;
  24563. + *)
  24564. + echo "Unknown cpu used in --with-$which=$val." 1>&2
  24565. + exit 1
  24566. + ;;
  24567. + esac
  24568. + done
  24569. + ;;
  24570. +
  24571. + s390*-*-*)
  24572. + supported_defaults="arch mode tune"
  24573. +
  24574. + for which in arch tune; do
  24575. + eval "val=\$with_$which"
  24576. + case ${val} in
  24577. + "" | g5 | g6 | z900 | z990 | z9-109 | z9-ec | z10)
  24578. + # OK
  24579. + ;;
  24580. + *)
  24581. + echo "Unknown cpu used in --with-$which=$val." 1>&2
  24582. + exit 1
  24583. + ;;
  24584. + esac
  24585. + done
  24586. +
  24587. + case ${with_mode} in
  24588. + "" | esa | zarch)
  24589. + # OK
  24590. + ;;
  24591. + *)
  24592. + echo "Unknown architecture mode used in --with-mode=$with_mode." 1>&2
  24593. + exit 1
  24594. + ;;
  24595. + esac
  24596. + ;;
  24597. +
  24598. + sh[123456ble]-*-* | sh-*-*)
  24599. + supported_defaults="cpu"
  24600. + case "`echo $with_cpu | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ_ abcdefghijklmnopqrstuvwxyz- | sed s/sh/m/`" in
  24601. + "" | m1 | m2 | m2e | m3 | m3e | m4 | m4-single | m4-single-only | m4-nofpu )
  24602. + # OK
  24603. + ;;
  24604. + m2a | m2a-single | m2a-single-only | m2a-nofpu)
  24605. + ;;
  24606. + m4a | m4a-single | m4a-single-only | m4a-nofpu | m4al)
  24607. + ;;
  24608. + *)
  24609. + echo "Unknown CPU used in --with-cpu=$with_cpu, known values:" 1>&2
  24610. + echo "m1 m2 m2e m3 m3e m4 m4-single m4-single-only m4-nofpu" 1>&2
  24611. + echo "m4a m4a-single m4a-single-only m4a-nofpu m4al" 1>&2
  24612. + echo "m2a m2a-single m2a-single-only m2a-nofpu" 1>&2
  24613. + exit 1
  24614. + ;;
  24615. + esac
  24616. + ;;
  24617. + sparc*-*-*)
  24618. + supported_defaults="cpu float tune"
  24619. +
  24620. + for which in cpu tune; do
  24621. + eval "val=\$with_$which"
  24622. + case ${val} in
  24623. + "" | sparc | sparcv9 | sparc64 | sparc86x \
  24624. + | v7 | cypress | v8 | supersparc | sparclite | f930 \
  24625. + | f934 | hypersparc | sparclite86x | sparclet | tsc701 \
  24626. + | v9 | ultrasparc | ultrasparc3 | niagara | niagara2)
  24627. + # OK
  24628. + ;;
  24629. + *)
  24630. + echo "Unknown cpu used in --with-$which=$val" 1>&2
  24631. + exit 1
  24632. + ;;
  24633. + esac
  24634. + done
  24635. +
  24636. + case ${with_float} in
  24637. + "" | soft | hard)
  24638. + # OK
  24639. + ;;
  24640. + *)
  24641. + echo "Unknown floating point type used in --with-float=$with_float" 1>&2
  24642. + exit 1
  24643. + ;;
  24644. + esac
  24645. + ;;
  24646. +
  24647. + spu-*-*)
  24648. + supported_defaults="arch tune"
  24649. +
  24650. + for which in arch tune; do
  24651. + eval "val=\$with_$which"
  24652. + case ${val} in
  24653. + "" | cell | celledp)
  24654. + # OK
  24655. + ;;
  24656. + *)
  24657. + echo "Unknown cpu used in --with-$which=$val." 1>&2
  24658. + exit 1
  24659. + ;;
  24660. + esac
  24661. + done
  24662. + ;;
  24663. +
  24664. + v850*-*-*)
  24665. + supported_defaults=cpu
  24666. + case ${with_cpu} in
  24667. + "" | v850e | v850e1)
  24668. + # OK
  24669. + ;;
  24670. + *)
  24671. + echo "Unknown cpu used in --with-cpu=$with_cpu" 1>&2
  24672. + exit 1
  24673. + ;;
  24674. + esac
  24675. + ;;
  24676. +esac
  24677. +
  24678. +# Set some miscellaneous flags for particular targets.
  24679. +target_cpu_default2=
  24680. +case ${target} in
  24681. + alpha*-*-*)
  24682. + if test x$gas = xyes
  24683. + then
  24684. + target_cpu_default2="MASK_GAS"
  24685. + fi
  24686. + ;;
  24687. +
  24688. + arm*-*-*)
  24689. + if test x$target_cpu_cname = x
  24690. + then
  24691. + target_cpu_default2=TARGET_CPU_generic
  24692. + else
  24693. + target_cpu_default2=TARGET_CPU_$target_cpu_cname
  24694. + fi
  24695. + ;;
  24696. +
  24697. + hppa*-*-*)
  24698. + target_cpu_default2="MASK_BIG_SWITCH"
  24699. + if test x$gas = xyes
  24700. + then
  24701. + target_cpu_default2="${target_cpu_default2}|MASK_GAS|MASK_JUMP_IN_DELAY"
  24702. + fi
  24703. + ;;
  24704. +
  24705. + fido*-*-* | m68k*-*-*)
  24706. + target_cpu_default2=$m68k_cpu_ident
  24707. + if [ x"$m68k_arch_family" != x ]; then
  24708. + tmake_file="m68k/t-$m68k_arch_family $tmake_file"
  24709. + fi
  24710. + ;;
  24711. +
  24712. + i[34567]86-*-darwin* | x86_64-*-darwin*)
  24713. + tmake_file="${tmake_file} i386/t-fprules-softfp soft-fp/t-softfp"
  24714. + ;;
  24715. + i[34567]86-*-linux* | x86_64-*-linux* | i[34567]86-*-kfreebsd*-gnu | x86_64-*-kfreebsd*-gnu)
  24716. + tmake_file="${tmake_file} i386/t-fprules-softfp soft-fp/t-softfp i386/t-linux"
  24717. + ;;
  24718. + ia64*-*-linux*)
  24719. + tmake_file="${tmake_file} ia64/t-fprules-softfp soft-fp/t-softfp"
  24720. + ;;
  24721. +
  24722. + mips*-*-*)
  24723. + if test x$gnu_ld = xyes
  24724. + then
  24725. + target_cpu_default2="MASK_SPLIT_ADDRESSES"
  24726. + fi
  24727. + case ${target} in
  24728. + mips*el-*-*)
  24729. + tm_defines="TARGET_ENDIAN_DEFAULT=0 $tm_defines"
  24730. + ;;
  24731. + esac
  24732. + if test "x$enable_gofast" = xyes
  24733. + then
  24734. + tm_defines="US_SOFTWARE_GOFAST $tm_defines"
  24735. + tmake_file="mips/t-gofast $tmake_file"
  24736. + else
  24737. + tmake_file="mips/t-mips $tmake_file"
  24738. + fi
  24739. + ;;
  24740. +
  24741. + powerpc*-*-* | rs6000-*-*)
  24742. + # FIXME: The PowerPC port uses the value set at compile time,
  24743. + # although it's only cosmetic.
  24744. + if test "x$with_cpu" != x
  24745. + then
  24746. + target_cpu_default2="\\\"$with_cpu\\\""
  24747. + fi
  24748. + out_file=rs6000/rs6000.c
  24749. + c_target_objs="${c_target_objs} rs6000-c.o"
  24750. + cxx_target_objs="${cxx_target_objs} rs6000-c.o"
  24751. + tmake_file="rs6000/t-rs6000 ${tmake_file}"
  24752. +
  24753. + if test x$enable_e500_double = xyes
  24754. + then
  24755. + tm_file="$tm_file rs6000/e500-double.h"
  24756. + fi
  24757. + ;;
  24758. +
  24759. + sh[123456ble]*-*-* | sh-*-*)
  24760. + c_target_objs="${c_target_objs} sh-c.o"
  24761. + cxx_target_objs="${cxx_target_objs} sh-c.o"
  24762. + ;;
  24763. +
  24764. + sparc*-*-*)
  24765. + # Some standard aliases.
  24766. + case x$with_cpu in
  24767. + xsparc)
  24768. + with_cpu=v7
  24769. + ;;
  24770. + xsparcv9 | xsparc64)
  24771. + with_cpu=v9
  24772. + ;;
  24773. + esac
  24774. +
  24775. + # The SPARC port checks this value at compile-time.
  24776. + target_cpu_default2="TARGET_CPU_$with_cpu"
  24777. + ;;
  24778. + v850*-*-*)
  24779. + # FIXME: The v850 is "special" in that it does not support
  24780. + # runtime CPU selection, only --with-cpu.
  24781. + case "x$with_cpu" in
  24782. + x)
  24783. + ;;
  24784. + xv850e)
  24785. + target_cpu_default2="TARGET_CPU_$with_cpu"
  24786. + ;;
  24787. + esac
  24788. + ;;
  24789. +esac
  24790. +
  24791. +t=
  24792. +all_defaults="abi cpu cpu_32 cpu_64 arch arch_32 arch_64 tune tune_32 tune_64 schedule float mode fpu divide llsc mips-plt"
  24793. +for option in $all_defaults
  24794. +do
  24795. + eval "val=\$with_"`echo $option | sed s/-/_/g`
  24796. + if test -n "$val"; then
  24797. + case " $supported_defaults " in
  24798. + *" $option "*)
  24799. + ;;
  24800. + *)
  24801. + echo "This target does not support --with-$option." 1>&2
  24802. + echo "Valid --with options are: $supported_defaults" 1>&2
  24803. + exit 1
  24804. + ;;
  24805. + esac
  24806. +
  24807. + if test "x$t" = x
  24808. + then
  24809. + t="{ \"$option\", \"$val\" }"
  24810. + else
  24811. + t="${t}, { \"$option\", \"$val\" }"
  24812. + fi
  24813. + fi
  24814. +done
  24815. +
  24816. +if test "x$t" = x
  24817. +then
  24818. + configure_default_options="{ { NULL, NULL} }"
  24819. +else
  24820. + configure_default_options="{ ${t} }"
  24821. +fi
  24822. +
  24823. +if test "$target_cpu_default2" != ""
  24824. +then
  24825. + if test "$target_cpu_default" != ""
  24826. + then
  24827. + target_cpu_default="(${target_cpu_default}|${target_cpu_default2})"
  24828. + else
  24829. + target_cpu_default=$target_cpu_default2
  24830. + fi
  24831. +fi
  24832. diff -Nur gcc-4.4.6.orig/gcc/configure.ac gcc-4.4.6/gcc/configure.ac
  24833. --- gcc-4.4.6.orig/gcc/configure.ac 2010-12-13 19:19:43.000000000 +0100
  24834. +++ gcc-4.4.6/gcc/configure.ac 2011-10-22 19:23:08.532581301 +0200
  24835. @@ -2240,10 +2240,9 @@
  24836. as_ver=`$gcc_cv_as --version 2>/dev/null | sed 1q`
  24837. if echo "$as_ver" | grep GNU > /dev/null; then
  24838. changequote(,)dnl
  24839. - as_vers=`echo $as_ver | sed -n \
  24840. - -e 's,^.*[ ]\([0-9][0-9]*\.[0-9][0-9]*.*\)$,\1,p'`
  24841. - as_major=`expr "$as_vers" : '\([0-9]*\)'`
  24842. - as_minor=`expr "$as_vers" : '[0-9]*\.\([0-9]*\)'`
  24843. + as_ver=`echo $as_ver | sed -e 's/GNU assembler\( (GNU Binutils)\)\? \([0-9.][0-9.]*\).*/\2/'`
  24844. + as_major=`echo $as_ver | sed 's/\..*//'`
  24845. + as_minor=`echo $as_ver | sed 's/[^.]*\.\([0-9]*\).*/\1/'`
  24846. changequote([,])dnl
  24847. if test $as_major -eq 2 && test $as_minor -lt 11
  24848. then :
  24849. @@ -3308,7 +3307,7 @@
  24850. i?86*-*-* | mips*-*-* | alpha*-*-* | powerpc*-*-* | sparc*-*-* | m68*-*-* \
  24851. | x86_64*-*-* | hppa*-*-* | arm*-*-* \
  24852. | xstormy16*-*-* | cris-*-* | crisv32-*-* | xtensa*-*-* | bfin-*-* | score*-*-* \
  24853. - | spu-*-* | fido*-*-* | m32c-*-*)
  24854. + | spu-*-* | fido*-*-* | m32c-*-* | avr32-*-*)
  24855. insn="nop"
  24856. ;;
  24857. ia64*-*-* | s390*-*-*)
  24858. diff -Nur gcc-4.4.6.orig/gcc/doc/extend.texi gcc-4.4.6/gcc/doc/extend.texi
  24859. --- gcc-4.4.6.orig/gcc/doc/extend.texi 2011-03-23 22:45:18.000000000 +0100
  24860. +++ gcc-4.4.6/gcc/doc/extend.texi 2011-10-22 19:23:08.532581301 +0200
  24861. @@ -2397,7 +2397,7 @@
  24862. @item interrupt
  24863. @cindex interrupt handler functions
  24864. -Use this attribute on the ARM, AVR, CRX, M32C, M32R/D, m68k,
  24865. +Use this attribute on the ARM, AVR, AVR32, CRX, M32C, M32R/D, m68k,
  24866. and Xstormy16 ports to indicate that the specified function is an
  24867. interrupt handler. The compiler will generate function entry and exit
  24868. sequences suitable for use in an interrupt handler when this attribute
  24869. @@ -2417,6 +2417,15 @@
  24870. Permissible values for this parameter are: IRQ, FIQ, SWI, ABORT and UNDEF@.
  24871. +Note, for the AVR32, you can specify which banking scheme is used for
  24872. +the interrupt mode this interrupt handler is used in like this:
  24873. +
  24874. +@smallexample
  24875. +void f () __attribute__ ((interrupt ("FULL")));
  24876. +@end smallexample
  24877. +
  24878. +Permissible values for this parameter are: FULL, HALF, NONE and UNDEF.
  24879. +
  24880. On ARMv7-M the interrupt type is ignored, and the attribute means the function
  24881. may be called with a word aligned stack pointer.
  24882. @@ -4188,6 +4197,23 @@
  24883. @end table
  24884. +@subsection AVR32 Variable Attributes
  24885. +
  24886. +One attribute is currently defined for AVR32 configurations:
  24887. +@code{rmw_addressable}
  24888. +
  24889. +@table @code
  24890. +@item rmw_addressable
  24891. +@cindex @code{rmw_addressable} attribute
  24892. +
  24893. +This attribute can be used to signal that a variable can be accessed
  24894. +with the addressing mode of the AVR32 Atomic Read-Modify-Write memory
  24895. +instructions and hence make it possible for gcc to generate these
  24896. +instructions without using built-in functions or inline assembly statements.
  24897. +Variables used within the AVR32 Atomic Read-Modify-Write built-in
  24898. +functions will automatically get the @code{rmw_addressable} attribute.
  24899. +@end table
  24900. +
  24901. @subsection AVR Variable Attributes
  24902. @table @code
  24903. @@ -7042,6 +7068,7 @@
  24904. * Alpha Built-in Functions::
  24905. * ARM iWMMXt Built-in Functions::
  24906. * ARM NEON Intrinsics::
  24907. +* AVR32 Built-in Functions::
  24908. * Blackfin Built-in Functions::
  24909. * FR-V Built-in Functions::
  24910. * X86 Built-in Functions::
  24911. @@ -7284,6 +7311,7 @@
  24912. long long __builtin_arm_wzero ()
  24913. @end smallexample
  24914. +
  24915. @node ARM NEON Intrinsics
  24916. @subsection ARM NEON Intrinsics
  24917. @@ -7292,6 +7320,74 @@
  24918. @include arm-neon-intrinsics.texi
  24919. +@node AVR32 Built-in Functions
  24920. +@subsection AVR32 Built-in Functions
  24921. +
  24922. +Built-in functions for atomic memory (RMW) instructions. Note that these
  24923. +built-ins will fail for targets where the RMW instructions are not
  24924. +implemented. Also note that these instructions only that a Ks15 << 2
  24925. +memory address and will therefor not work with any runtime computed
  24926. +memory addresses. The user is responsible for making sure that any
  24927. +pointers used within these functions points to a valid memory address.
  24928. +
  24929. +@smallexample
  24930. +void __builtin_mems(int */*ptr*/, int /*bit*/)
  24931. +void __builtin_memc(int */*ptr*/, int /*bit*/)
  24932. +void __builtin_memt(int */*ptr*/, int /*bit*/)
  24933. +@end smallexample
  24934. +
  24935. +Built-in functions for DSP instructions. Note that these built-ins will
  24936. +fail for targets where the DSP instructions are not implemented.
  24937. +
  24938. +@smallexample
  24939. +int __builtin_sats (int /*Rd*/,int /*sa*/, int /*bn*/)
  24940. +int __builtin_satu (int /*Rd*/,int /*sa*/, int /*bn*/)
  24941. +int __builtin_satrnds (int /*Rd*/,int /*sa*/, int /*bn*/)
  24942. +int __builtin_satrndu (int /*Rd*/,int /*sa*/, int /*bn*/)
  24943. +short __builtin_mulsathh_h (short, short)
  24944. +int __builtin_mulsathh_w (short, short)
  24945. +short __builtin_mulsatrndhh_h (short, short)
  24946. +int __builtin_mulsatrndwh_w (int, short)
  24947. +int __builtin_mulsatwh_w (int, short)
  24948. +int __builtin_macsathh_w (int, short, short)
  24949. +short __builtin_satadd_h (short, short)
  24950. +short __builtin_satsub_h (short, short)
  24951. +int __builtin_satadd_w (int, int)
  24952. +int __builtin_satsub_w (int, int)
  24953. +long long __builtin_mulwh_d(int, short)
  24954. +long long __builtin_mulnwh_d(int, short)
  24955. +long long __builtin_macwh_d(long long, int, short)
  24956. +long long __builtin_machh_d(long long, short, short)
  24957. +@end smallexample
  24958. +
  24959. +Other built-in functions for instructions that cannot easily be
  24960. +generated by the compiler.
  24961. +
  24962. +@smallexample
  24963. +void __builtin_ssrf(int);
  24964. +void __builtin_csrf(int);
  24965. +void __builtin_musfr(int);
  24966. +int __builtin_mustr(void);
  24967. +int __builtin_mfsr(int /*Status Register Address*/)
  24968. +void __builtin_mtsr(int /*Status Register Address*/, int /*Value*/)
  24969. +int __builtin_mfdr(int /*Debug Register Address*/)
  24970. +void __builtin_mtdr(int /*Debug Register Address*/, int /*Value*/)
  24971. +void __builtin_cache(void * /*Address*/, int /*Cache Operation*/)
  24972. +void __builtin_sync(int /*Sync Operation*/)
  24973. +void __builtin_tlbr(void)
  24974. +void __builtin_tlbs(void)
  24975. +void __builtin_tlbw(void)
  24976. +void __builtin_breakpoint(void)
  24977. +int __builtin_xchg(void * /*Address*/, int /*Value*/ )
  24978. +short __builtin_bswap_16(short)
  24979. +int __builtin_bswap_32(int)
  24980. +void __builtin_cop(int/*cpnr*/, int/*crd*/, int/*crx*/, int/*cry*/, int/*op*/)
  24981. +int __builtin_mvcr_w(int/*cpnr*/, int/*crs*/)
  24982. +void __builtin_mvrc_w(int/*cpnr*/, int/*crd*/, int/*value*/)
  24983. +long long __builtin_mvcr_d(int/*cpnr*/, int/*crs*/)
  24984. +void __builtin_mvrc_d(int/*cpnr*/, int/*crd*/, long long/*value*/)
  24985. +@end smallexample
  24986. +
  24987. @node Blackfin Built-in Functions
  24988. @subsection Blackfin Built-in Functions
  24989. diff -Nur gcc-4.4.6.orig/gcc/doc/invoke.texi gcc-4.4.6/gcc/doc/invoke.texi
  24990. --- gcc-4.4.6.orig/gcc/doc/invoke.texi 2011-03-23 23:02:12.000000000 +0100
  24991. +++ gcc-4.4.6/gcc/doc/invoke.texi 2011-10-22 19:23:08.536581300 +0200
  24992. @@ -195,7 +195,7 @@
  24993. -fvisibility-ms-compat @gol
  24994. -Wabi -Wctor-dtor-privacy @gol
  24995. -Wnon-virtual-dtor -Wreorder @gol
  24996. --Weffc++ -Wstrict-null-sentinel @gol
  24997. +-Weffc++ -Wno-deprecated @gol
  24998. -Wno-non-template-friend -Wold-style-cast @gol
  24999. -Woverloaded-virtual -Wno-pmf-conversions @gol
  25000. -Wsign-promo}
  25001. @@ -641,6 +641,12 @@
  25002. -mauto-incdec -minmax -mlong-calls -mshort @gol
  25003. -msoft-reg-count=@var{count}}
  25004. +@emph{AVR32 Options}
  25005. +@gccoptlist{-muse-rodata-section -mhard-float -msoft-float -mrelax @gol
  25006. +-mforce-double-align -mno-init-got -mrelax -mmd-reorg-opt -masm-addr-pseudos @gol
  25007. +-mpart=@var{part} -mcpu=@var{cpu} -march=@var{arch} @gol
  25008. +-mfast-float -mimm-in-const-pool}
  25009. +
  25010. @emph{MCore Options}
  25011. @gccoptlist{-mhardlit -mno-hardlit -mdiv -mno-div -mrelax-immediates @gol
  25012. -mno-relax-immediates -mwide-bitfields -mno-wide-bitfields @gol
  25013. @@ -3256,13 +3262,11 @@
  25014. If you want to warn about code which uses the uninitialized value of the
  25015. variable in its own initializer, use the @option{-Winit-self} option.
  25016. -These warnings occur for individual uninitialized or clobbered
  25017. -elements of structure, union or array variables as well as for
  25018. -variables which are uninitialized or clobbered as a whole. They do
  25019. -not occur for variables or elements declared @code{volatile}. Because
  25020. -these warnings depend on optimization, the exact variables or elements
  25021. -for which there are warnings will depend on the precise optimization
  25022. -options and version of GCC used.
  25023. +These warnings occur only for variables that are candidates for
  25024. +register allocation. Therefore, they do not occur for a variable that
  25025. +is declared @code{volatile}, or whose address is taken, or whose size
  25026. +is other than 1, 2, 4 or 8 bytes. Also, they do not occur for
  25027. +structures, unions or arrays, even when they are in registers.
  25028. Note that there may be no warning about a variable that is used only
  25029. to compute a value that itself is never used, because such
  25030. @@ -7445,10 +7449,6 @@
  25031. we always try to remove unnecessary ivs from the set during its
  25032. optimization when a new iv is added to the set.
  25033. -@item scev-max-expr-size
  25034. -Bound on size of expressions used in the scalar evolutions analyzer.
  25035. -Large expressions slow the analyzer.
  25036. -
  25037. @item omega-max-vars
  25038. The maximum number of variables in an Omega constraint system.
  25039. The default value is 128.
  25040. @@ -8844,6 +8844,7 @@
  25041. * ARC Options::
  25042. * ARM Options::
  25043. * AVR Options::
  25044. +* AVR32 Options::
  25045. * Blackfin Options::
  25046. * CRIS Options::
  25047. * CRX Options::
  25048. @@ -9332,6 +9333,145 @@
  25049. size.
  25050. @end table
  25051. +@node AVR32 Options
  25052. +@subsection AVR32 Options
  25053. +@cindex AVR32 Options
  25054. +
  25055. +These options are defined for AVR32 implementations:
  25056. +
  25057. +@table @gcctabopt
  25058. +@item -muse-rodata-section
  25059. +@opindex muse-rodata-section
  25060. +Use section @samp{.rodata} for read-only data instead of @samp{.text}.
  25061. +
  25062. +@item -mhard-float
  25063. +@opindex mhard-float
  25064. +Use floating point coprocessor instructions.
  25065. +
  25066. +@item -msoft-float
  25067. +@opindex msoft-float
  25068. +Use software floating-point library for floating-point operations.
  25069. +
  25070. +@item -mforce-double-align
  25071. +@opindex mforce-double-align
  25072. +Force double-word alignment for double-word memory accesses.
  25073. +
  25074. +@item -masm-addr-pseudos
  25075. +@opindex masm-addr-pseudos
  25076. +Use assembler pseudo-instructions lda.w and call for handling direct
  25077. +addresses. (Enabled by default)
  25078. +
  25079. +@item -mno-init-got
  25080. +@opindex mno-init-got
  25081. +Do not initialize the GOT register before using it when compiling PIC
  25082. +code.
  25083. +
  25084. +@item -mrelax
  25085. +@opindex mrelax
  25086. +Let invoked assembler and linker do relaxing
  25087. +(Enabled by default when optimization level is >1).
  25088. +This means that when the address of symbols are known at link time,
  25089. +the linker can optimize @samp{icall} and @samp{mcall}
  25090. +instructions into a @samp{rcall} instruction if possible.
  25091. +Loading the address of a symbol can also be optimized.
  25092. +
  25093. +@item -mmd-reorg-opt
  25094. +@opindex mmd-reorg-opt
  25095. +Perform machine dependent optimizations in reorg stage.
  25096. +
  25097. +@item -mpart=@var{part}
  25098. +@opindex mpart
  25099. +Generate code for the specified part. Permissible parts are:
  25100. +@samp{ap7000},
  25101. +@samp{ap7001},
  25102. +@samp{ap7002},
  25103. +@samp{ap7200},
  25104. +@samp{uc3a0128},
  25105. +@samp{uc3a0256},
  25106. +@samp{uc3a0512},
  25107. +@samp{uc3a0512es},
  25108. +@samp{uc3a1128},
  25109. +@samp{uc3a1256},
  25110. +@samp{uc3a1512},
  25111. +@samp{uc3a1512es},
  25112. +@samp{uc3a3revd},
  25113. +@samp{uc3a364},
  25114. +@samp{uc3a364s},
  25115. +@samp{uc3a3128},
  25116. +@samp{uc3a3128s},
  25117. +@samp{uc3a3256},
  25118. +@samp{uc3a3256s},
  25119. +@samp{uc3a464},
  25120. +@samp{uc3a464s},
  25121. +@samp{uc3a4128},
  25122. +@samp{uc3a4128s},
  25123. +@samp{uc3a4256},
  25124. +@samp{uc3a4256s},
  25125. +@samp{uc3b064},
  25126. +@samp{uc3b0128},
  25127. +@samp{uc3b0256},
  25128. +@samp{uc3b0256es},
  25129. +@samp{uc3b0512},
  25130. +@samp{uc3b0512revc},
  25131. +@samp{uc3b164},
  25132. +@samp{uc3b1128},
  25133. +@samp{uc3b1256},
  25134. +@samp{uc3b1256es},
  25135. +@samp{uc3b1512},
  25136. +@samp{uc3b1512revc}
  25137. +@samp{uc64d3},
  25138. +@samp{uc128d3},
  25139. +@samp{uc64d4},
  25140. +@samp{uc128d4},
  25141. +@samp{uc3c0512crevc},
  25142. +@samp{uc3c1512crevc},
  25143. +@samp{uc3c2512crevc},
  25144. +@samp{uc3l0256},
  25145. +@samp{uc3l0128},
  25146. +@samp{uc3l064},
  25147. +@samp{uc3l032},
  25148. +@samp{uc3l016},
  25149. +@samp{uc3l064revb},
  25150. +@samp{uc64l3u},
  25151. +@samp{uc128l3u},
  25152. +@samp{uc256l3u},
  25153. +@samp{uc64l4u},
  25154. +@samp{uc128l4u},
  25155. +@samp{uc256l4u},
  25156. +@samp{uc3c064c},
  25157. +@samp{uc3c0128c},
  25158. +@samp{uc3c0256c},
  25159. +@samp{uc3c0512c},
  25160. +@samp{uc3c164c},
  25161. +@samp{uc3c1128c},
  25162. +@samp{uc3c1256c},
  25163. +@samp{uc3c1512c},
  25164. +@samp{uc3c264c},
  25165. +@samp{uc3c2128c},
  25166. +@samp{uc3c2256c},
  25167. +@samp{uc3c2512c},
  25168. +@samp{mxt768e}.
  25169. +
  25170. +@item -mcpu=@var{cpu-type}
  25171. +@opindex mcpu
  25172. +Same as -mpart. Obsolete.
  25173. +
  25174. +@item -march=@var{arch}
  25175. +@opindex march
  25176. +Generate code for the specified architecture. Permissible architectures are:
  25177. +@samp{ap}, @samp{uc} and @samp{ucr2}.
  25178. +
  25179. +@item -mfast-float
  25180. +@opindex mfast-float
  25181. +Enable fast floating-point library that does not conform to IEEE-754 but is still good enough
  25182. +for most applications. The fast floating-point library does not round to the nearest even
  25183. +but away from zero. Enabled by default if the -funsafe-math-optimizations switch is specified.
  25184. +
  25185. +@item -mimm-in-const-pool
  25186. +@opindex mimm-in-const-pool
  25187. +Put large immediates in constant pool. This is enabled by default for archs with insn-cache.
  25188. +@end table
  25189. +
  25190. @node Blackfin Options
  25191. @subsection Blackfin Options
  25192. @cindex Blackfin Options
  25193. @@ -9387,29 +9527,12 @@
  25194. contain speculative loads after jump instructions. If this option is used,
  25195. @code{__WORKAROUND_SPECULATIVE_LOADS} is defined.
  25196. -@item -mno-specld-anomaly
  25197. -@opindex mno-specld-anomaly
  25198. -Don't generate extra code to prevent speculative loads from occurring.
  25199. -
  25200. @item -mcsync-anomaly
  25201. @opindex mcsync-anomaly
  25202. When enabled, the compiler will ensure that the generated code does not
  25203. contain CSYNC or SSYNC instructions too soon after conditional branches.
  25204. If this option is used, @code{__WORKAROUND_SPECULATIVE_SYNCS} is defined.
  25205. -@item -mno-csync-anomaly
  25206. -@opindex mno-csync-anomaly
  25207. -Don't generate extra code to prevent CSYNC or SSYNC instructions from
  25208. -occurring too soon after a conditional branch.
  25209. -
  25210. -@item -mlow-64k
  25211. -@opindex mlow-64k
  25212. -When enabled, the compiler is free to take advantage of the knowledge that
  25213. -the entire program fits into the low 64k of memory.
  25214. -
  25215. -@item -mno-low-64k
  25216. -@opindex mno-low-64k
  25217. -Assume that the program is arbitrarily large. This is the default.
  25218. @item -mstack-check-l1
  25219. @opindex mstack-check-l1
  25220. @@ -9423,11 +9546,6 @@
  25221. without virtual memory management. This option implies @option{-fPIC}.
  25222. With a @samp{bfin-elf} target, this option implies @option{-msim}.
  25223. -@item -mno-id-shared-library
  25224. -@opindex mno-id-shared-library
  25225. -Generate code that doesn't assume ID based shared libraries are being used.
  25226. -This is the default.
  25227. -
  25228. @item -mleaf-id-shared-library
  25229. @opindex mleaf-id-shared-library
  25230. Generate code that supports shared libraries via the library ID method,
  25231. @@ -9469,11 +9587,6 @@
  25232. will lie outside of the 24 bit addressing range of the offset based
  25233. version of subroutine call instruction.
  25234. -This feature is not enabled by default. Specifying
  25235. -@option{-mno-long-calls} will restore the default behavior. Note these
  25236. -switches have no effect on how the compiler generates code to handle
  25237. -function calls via function pointers.
  25238. -
  25239. @item -mfast-fp
  25240. @opindex mfast-fp
  25241. Link with the fast floating-point library. This library relaxes some of
  25242. diff -Nur gcc-4.4.6.orig/gcc/doc/md.texi gcc-4.4.6/gcc/doc/md.texi
  25243. --- gcc-4.4.6.orig/gcc/doc/md.texi 2009-05-07 10:14:55.000000000 +0200
  25244. +++ gcc-4.4.6/gcc/doc/md.texi 2011-10-22 19:23:08.548581303 +0200
  25245. @@ -4,6 +4,7 @@
  25246. @c This is part of the GCC manual.
  25247. @c For copying conditions, see the file gcc.texi.
  25248. +
  25249. @ifset INTERNALS
  25250. @node Machine Desc
  25251. @chapter Machine Descriptions
  25252. @@ -1685,6 +1686,58 @@
  25253. A memory reference suitable for the ARMv4 ldrsb instruction.
  25254. @end table
  25255. +@item AVR32 family---@file{avr32.h}
  25256. +@table @code
  25257. +@item f
  25258. +Floating-point registers (f0 to f15)
  25259. +
  25260. +@item Ku@var{bits}
  25261. +Unsigned constant representable with @var{bits} number of bits (must be
  25262. +two digits), e.g., an unsigned 8-bit constant is written as @samp{Ku08}
  25263. +
  25264. +@item Ks@var{bits}
  25265. +Signed constant representable with @var{bits} number of bits (Must be
  25266. +two digits). I.e: A signed 12-bit constant is written as @samp{Ks12}
  25267. +
  25268. +@item Is@var{bits}
  25269. +The negated range of a signed constant representable with @var{bits}
  25270. +number of bits. The same as @samp{Ks@var{bits}} with a negated range.
  25271. +This means that the constant must be in the range @math{-2^{bits-1}-1} to @math{2^{bits-1}}
  25272. +
  25273. +@item G
  25274. +A single/double precision floating-point immediate or 64-bit integer
  25275. +immediate where the least and most significant words both can be
  25276. +loaded with a move instruction. That is, the integer form of the
  25277. +values in the least and most significant words both are in the range
  25278. +@math{-2^{20}} to @math{2^{20}-1}.
  25279. +
  25280. +@item RKs@var{bits}
  25281. +A memory reference where the address consists of a base register
  25282. +plus a signed immediate displacement with range given by @samp{Ks@var{bits}}
  25283. +which has the same format as for the signed immediate integer constraint
  25284. +given above.
  25285. +
  25286. +@item RKu@var{bits}
  25287. +A memory reference where the address consists of a base register
  25288. +plus an unsigned immediate displacement with range given by @samp{Ku@var{bits}}
  25289. +which has the same format as for the unsigned immediate integer constraint
  25290. +given above.
  25291. +
  25292. +@item S
  25293. +A memory reference with an immediate or register offset
  25294. +
  25295. +@item T
  25296. +A memory reference to a constant pool entry
  25297. +
  25298. +@item W
  25299. +A valid operand for use in the @samp{lda.w} instruction macro when
  25300. +relaxing is enabled
  25301. +
  25302. +@item Z
  25303. +A memory reference valid for coprocessor memory instructions
  25304. +
  25305. +@end table
  25306. +
  25307. @item AVR family---@file{config/avr/constraints.md}
  25308. @table @code
  25309. @item l
  25310. diff -Nur gcc-4.4.6.orig/gcc/expmed.c gcc-4.4.6/gcc/expmed.c
  25311. --- gcc-4.4.6.orig/gcc/expmed.c 2010-08-06 09:52:04.000000000 +0200
  25312. +++ gcc-4.4.6/gcc/expmed.c 2011-10-22 19:23:08.548581303 +0200
  25313. @@ -472,9 +472,9 @@
  25314. ? ((GET_MODE_SIZE (fieldmode) >= UNITS_PER_WORD
  25315. || GET_MODE_SIZE (GET_MODE (op0)) == GET_MODE_SIZE (fieldmode))
  25316. && byte_offset % GET_MODE_SIZE (fieldmode) == 0)
  25317. - : (! SLOW_UNALIGNED_ACCESS (fieldmode, MEM_ALIGN (op0))
  25318. + : ( (! SLOW_UNALIGNED_ACCESS (fieldmode, MEM_ALIGN (op0))
  25319. || (offset * BITS_PER_UNIT % bitsize == 0
  25320. - && MEM_ALIGN (op0) % GET_MODE_BITSIZE (fieldmode) == 0))))
  25321. + && MEM_ALIGN (op0) % GET_MODE_BITSIZE (fieldmode) == 0)))))
  25322. {
  25323. if (MEM_P (op0))
  25324. op0 = adjust_address (op0, fieldmode, offset);
  25325. diff -Nur gcc-4.4.6.orig/gcc/expr.c gcc-4.4.6/gcc/expr.c
  25326. --- gcc-4.4.6.orig/gcc/expr.c 2011-01-16 23:56:10.000000000 +0100
  25327. +++ gcc-4.4.6/gcc/expr.c 2011-10-22 19:23:08.552581303 +0200
  25328. @@ -52,6 +52,7 @@
  25329. #include "tree-flow.h"
  25330. #include "target.h"
  25331. #include "timevar.h"
  25332. +#include "c-common.h"
  25333. #include "df.h"
  25334. #include "diagnostic.h"
  25335. @@ -3647,16 +3648,17 @@
  25336. }
  25337. else
  25338. {
  25339. + emit_move_insn (stack_pointer_rtx,
  25340. + expand_binop (Pmode,
  25341. #ifdef STACK_GROWS_DOWNWARD
  25342. - /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
  25343. - dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
  25344. - GEN_INT (-(HOST_WIDE_INT) rounded_size));
  25345. + sub_optab,
  25346. #else
  25347. - /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
  25348. - dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
  25349. - GEN_INT (rounded_size));
  25350. + add_optab,
  25351. #endif
  25352. - dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
  25353. + stack_pointer_rtx,
  25354. + GEN_INT (rounded_size),
  25355. + NULL_RTX, 0, OPTAB_LIB_WIDEN));
  25356. + dest_addr = stack_pointer_rtx;
  25357. }
  25358. dest = gen_rtx_MEM (mode, dest_addr);
  25359. @@ -5775,7 +5777,8 @@
  25360. is a bit field, we cannot use addressing to access it.
  25361. Use bit-field techniques or SUBREG to store in it. */
  25362. - if (mode == VOIDmode
  25363. + if (
  25364. + mode == VOIDmode
  25365. || (mode != BLKmode && ! direct_store[(int) mode]
  25366. && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
  25367. && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
  25368. @@ -5932,7 +5935,18 @@
  25369. {
  25370. tree field = TREE_OPERAND (exp, 1);
  25371. size_tree = DECL_SIZE (field);
  25372. - if (!DECL_BIT_FIELD (field))
  25373. + if (!DECL_BIT_FIELD (field)
  25374. + /* Added for AVR32:
  25375. + Bitfields with a size equal to a target storage
  25376. + type might not cause DECL_BIT_FIELD to return
  25377. + true since it can be optimized into a normal array
  25378. + access operation. But for volatile bitfields we do
  25379. + not allow this when targetm.narrow_volatile_bitfield ()
  25380. + is false. We can use DECL_C_BIT_FIELD to check if this
  25381. + really is a c-bitfield. */
  25382. + && !(TREE_THIS_VOLATILE (exp)
  25383. + && !targetm.narrow_volatile_bitfield ()
  25384. + && DECL_C_BIT_FIELD (field)) )
  25385. mode = DECL_MODE (field);
  25386. else if (DECL_MODE (field) == BLKmode)
  25387. blkmode_bitfield = true;
  25388. @@ -7915,7 +7929,8 @@
  25389. by doing the extract into an object as wide as the field
  25390. (which we know to be the width of a basic mode), then
  25391. storing into memory, and changing the mode to BLKmode. */
  25392. - if (mode1 == VOIDmode
  25393. + if (
  25394. + mode1 == VOIDmode
  25395. || REG_P (op0) || GET_CODE (op0) == SUBREG
  25396. || (mode1 != BLKmode && ! direct_load[(int) mode1]
  25397. && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
  25398. diff -Nur gcc-4.4.6.orig/gcc/function.c gcc-4.4.6/gcc/function.c
  25399. --- gcc-4.4.6.orig/gcc/function.c 2010-08-16 22:24:54.000000000 +0200
  25400. +++ gcc-4.4.6/gcc/function.c 2011-10-22 19:23:08.552581303 +0200
  25401. @@ -2810,7 +2810,11 @@
  25402. assign_parm_remove_parallels (data);
  25403. /* Copy the value into the register. */
  25404. - if (data->nominal_mode != data->passed_mode
  25405. + if ( (data->nominal_mode != data->passed_mode
  25406. + /* Added for AVR32: If passed_mode is equal
  25407. + to the promoted nominal mode, why should we convert?
  25408. + The conversion should make no difference. */
  25409. + && data->passed_mode != promoted_nominal_mode)
  25410. || promoted_nominal_mode != data->promoted_mode)
  25411. {
  25412. int save_tree_used;
  25413. diff -Nur gcc-4.4.6.orig/gcc/genemit.c gcc-4.4.6/gcc/genemit.c
  25414. --- gcc-4.4.6.orig/gcc/genemit.c 2009-02-20 16:20:38.000000000 +0100
  25415. +++ gcc-4.4.6/gcc/genemit.c 2011-10-22 19:23:08.552581303 +0200
  25416. @@ -121,6 +121,24 @@
  25417. }
  25418. static void
  25419. +gen_vararg_prologue(int operands)
  25420. +{
  25421. + int i;
  25422. +
  25423. + if (operands > 1)
  25424. + {
  25425. + for (i = 1; i < operands; i++)
  25426. + printf(" rtx operand%d ATTRIBUTE_UNUSED;\n", i);
  25427. +
  25428. + printf(" va_list args;\n\n");
  25429. + printf(" va_start(args, operand0);\n");
  25430. + for (i = 1; i < operands; i++)
  25431. + printf(" operand%d = va_arg(args, rtx);\n", i);
  25432. + printf(" va_end(args);\n\n");
  25433. + }
  25434. +}
  25435. +
  25436. +static void
  25437. print_code (RTX_CODE code)
  25438. {
  25439. const char *p1;
  25440. @@ -406,18 +424,16 @@
  25441. fatal ("match_dup operand number has no match_operand");
  25442. /* Output the function name and argument declarations. */
  25443. - printf ("rtx\ngen_%s (", XSTR (insn, 0));
  25444. + printf ("rtx\ngen_%s ", XSTR (insn, 0));
  25445. +
  25446. if (operands)
  25447. - for (i = 0; i < operands; i++)
  25448. - if (i)
  25449. - printf (",\n\trtx operand%d ATTRIBUTE_UNUSED", i);
  25450. + printf("(rtx operand0 ATTRIBUTE_UNUSED, ...)\n");
  25451. else
  25452. - printf ("rtx operand%d ATTRIBUTE_UNUSED", i);
  25453. - else
  25454. - printf ("void");
  25455. - printf (")\n");
  25456. + printf("(void)\n");
  25457. printf ("{\n");
  25458. + gen_vararg_prologue(operands);
  25459. +
  25460. /* Output code to construct and return the rtl for the instruction body. */
  25461. if (XVECLEN (insn, 1) == 1)
  25462. @@ -461,16 +477,12 @@
  25463. operands = max_operand_vec (expand, 1);
  25464. /* Output the function name and argument declarations. */
  25465. - printf ("rtx\ngen_%s (", XSTR (expand, 0));
  25466. + printf ("rtx\ngen_%s ", XSTR (expand, 0));
  25467. if (operands)
  25468. - for (i = 0; i < operands; i++)
  25469. - if (i)
  25470. - printf (",\n\trtx operand%d", i);
  25471. - else
  25472. - printf ("rtx operand%d", i);
  25473. + printf("(rtx operand0 ATTRIBUTE_UNUSED, ...)\n");
  25474. else
  25475. - printf ("void");
  25476. - printf (")\n");
  25477. + printf("(void)\n");
  25478. +
  25479. printf ("{\n");
  25480. /* If we don't have any C code to write, only one insn is being written,
  25481. @@ -480,6 +492,8 @@
  25482. && operands > max_dup_opno
  25483. && XVECLEN (expand, 1) == 1)
  25484. {
  25485. + gen_vararg_prologue(operands);
  25486. +
  25487. printf (" return ");
  25488. gen_exp (XVECEXP (expand, 1, 0), DEFINE_EXPAND, NULL);
  25489. printf (";\n}\n\n");
  25490. @@ -493,6 +507,7 @@
  25491. for (; i <= max_scratch_opno; i++)
  25492. printf (" rtx operand%d ATTRIBUTE_UNUSED;\n", i);
  25493. printf (" rtx _val = 0;\n");
  25494. + gen_vararg_prologue(operands);
  25495. printf (" start_sequence ();\n");
  25496. /* The fourth operand of DEFINE_EXPAND is some code to be executed
  25497. diff -Nur gcc-4.4.6.orig/gcc/genflags.c gcc-4.4.6/gcc/genflags.c
  25498. --- gcc-4.4.6.orig/gcc/genflags.c 2007-07-26 10:37:01.000000000 +0200
  25499. +++ gcc-4.4.6/gcc/genflags.c 2011-10-22 19:23:08.552581303 +0200
  25500. @@ -127,7 +127,6 @@
  25501. gen_proto (rtx insn)
  25502. {
  25503. int num = num_operands (insn);
  25504. - int i;
  25505. const char *name = XSTR (insn, 0);
  25506. int truth = maybe_eval_c_test (XSTR (insn, 2));
  25507. @@ -158,12 +157,7 @@
  25508. if (num == 0)
  25509. fputs ("void", stdout);
  25510. else
  25511. - {
  25512. - for (i = 1; i < num; i++)
  25513. - fputs ("rtx, ", stdout);
  25514. -
  25515. - fputs ("rtx", stdout);
  25516. - }
  25517. + fputs("rtx, ...", stdout);
  25518. puts (");");
  25519. @@ -173,12 +167,7 @@
  25520. {
  25521. printf ("static inline rtx\ngen_%s", name);
  25522. if (num > 0)
  25523. - {
  25524. - putchar ('(');
  25525. - for (i = 0; i < num-1; i++)
  25526. - printf ("rtx ARG_UNUSED (%c), ", 'a' + i);
  25527. - printf ("rtx ARG_UNUSED (%c))\n", 'a' + i);
  25528. - }
  25529. + puts("(rtx ARG_UNUSED(a), ...)");
  25530. else
  25531. puts ("(void)");
  25532. puts ("{\n return 0;\n}");
  25533. diff -Nur gcc-4.4.6.orig/gcc/genoutput.c gcc-4.4.6/gcc/genoutput.c
  25534. --- gcc-4.4.6.orig/gcc/genoutput.c 2009-02-20 16:20:38.000000000 +0100
  25535. +++ gcc-4.4.6/gcc/genoutput.c 2011-10-22 19:23:08.552581303 +0200
  25536. @@ -386,7 +386,7 @@
  25537. }
  25538. if (d->name && d->name[0] != '*')
  25539. - printf (" (insn_gen_fn) gen_%s,\n", d->name);
  25540. + printf (" gen_%s,\n", d->name);
  25541. else
  25542. printf (" 0,\n");
  25543. diff -Nur gcc-4.4.6.orig/gcc/ifcvt.c gcc-4.4.6/gcc/ifcvt.c
  25544. --- gcc-4.4.6.orig/gcc/ifcvt.c 2010-01-07 15:59:59.000000000 +0100
  25545. +++ gcc-4.4.6/gcc/ifcvt.c 2011-10-22 19:23:08.552581303 +0200
  25546. @@ -84,7 +84,7 @@
  25547. static int num_updated_if_blocks;
  25548. /* # of changes made. */
  25549. -static int num_true_changes;
  25550. +int num_true_changes;
  25551. /* Whether conditional execution changes were made. */
  25552. static int cond_exec_changed_p;
  25553. @@ -290,6 +290,9 @@
  25554. if (must_be_last)
  25555. return FALSE;
  25556. +#ifdef IFCVT_ALLOW_MODIFY_TEST_IN_INSN
  25557. + if ( !IFCVT_ALLOW_MODIFY_TEST_IN_INSN )
  25558. +#endif
  25559. if (modified_in_p (test, insn))
  25560. {
  25561. if (!mod_ok)
  25562. @@ -570,15 +573,18 @@
  25563. IFCVT_MODIFY_FINAL (ce_info);
  25564. #endif
  25565. + /* Merge the blocks! */
  25566. + if ( reload_completed ){
  25567. /* Conversion succeeded. */
  25568. if (dump_file)
  25569. fprintf (dump_file, "%d insn%s converted to conditional execution.\n",
  25570. n_insns, (n_insns == 1) ? " was" : "s were");
  25571. - /* Merge the blocks! */
  25572. merge_if_block (ce_info);
  25573. cond_exec_changed_p = TRUE;
  25574. return TRUE;
  25575. + }
  25576. + return FALSE;
  25577. fail:
  25578. #ifdef IFCVT_MODIFY_CANCEL
  25579. @@ -1087,7 +1093,11 @@
  25580. != UNKNOWN))
  25581. {
  25582. rtx cond = if_info->cond;
  25583. - enum rtx_code code = reversed_comparison_code (cond, if_info->jump);
  25584. + /* This generates wrong code for AVR32. The cond code need not be reversed
  25585. + since the addmodecc patterns add if the condition is NOT met. */
  25586. + /* enum rtx_code code = reversed_comparison_code (cond, if_info->jump);*/
  25587. + enum rtx_code code = GET_CODE(cond);
  25588. +
  25589. /* First try to use addcc pattern. */
  25590. if (general_operand (XEXP (cond, 0), VOIDmode)
  25591. @@ -3039,7 +3049,12 @@
  25592. && noce_find_if_block (test_bb, then_edge, else_edge, pass))
  25593. goto success;
  25594. - if (HAVE_conditional_execution && reload_completed
  25595. + if (HAVE_conditional_execution &&
  25596. +#ifdef IFCVT_COND_EXEC_BEFORE_RELOAD
  25597. + (reload_completed || IFCVT_COND_EXEC_BEFORE_RELOAD)
  25598. +#else
  25599. + reload_completed
  25600. +#endif
  25601. && cond_exec_find_if_block (&ce_info))
  25602. goto success;
  25603. @@ -3154,7 +3169,11 @@
  25604. /* We only ever should get here after reload,
  25605. and only if we have conditional execution. */
  25606. +#ifdef IFCVT_COND_EXEC_BEFORE_RELOAD
  25607. + gcc_assert (HAVE_conditional_execution && (reload_completed||IFCVT_COND_EXEC_BEFORE_RELOAD));
  25608. +#else
  25609. gcc_assert (HAVE_conditional_execution && reload_completed);
  25610. +#endif
  25611. /* Discover if any fall through predecessors of the current test basic block
  25612. were && tests (which jump to the else block) or || tests (which jump to
  25613. @@ -4259,6 +4278,14 @@
  25614. static unsigned int
  25615. rest_of_handle_if_after_reload (void)
  25616. {
  25617. + /* Hack for the AVR32 experimental ifcvt processing before reload.
  25618. + The AVR32 specific ifcvt code needs to know when ifcvt after reload
  25619. + has begun. */
  25620. +#ifdef IFCVT_COND_EXEC_BEFORE_RELOAD
  25621. + if ( IFCVT_COND_EXEC_BEFORE_RELOAD )
  25622. + cfun->machine->ifcvt_after_reload = 1;
  25623. +#endif
  25624. +
  25625. if_convert ();
  25626. return 0;
  25627. }
  25628. diff -Nur gcc-4.4.6.orig/gcc/longlong.h gcc-4.4.6/gcc/longlong.h
  25629. --- gcc-4.4.6.orig/gcc/longlong.h 2009-08-12 00:36:56.000000000 +0200
  25630. +++ gcc-4.4.6/gcc/longlong.h 2011-10-22 19:23:08.552581303 +0200
  25631. @@ -250,6 +250,41 @@
  25632. #define COUNT_LEADING_ZEROS_0 32
  25633. #endif
  25634. +#if defined (__avr32__) && W_TYPE_SIZE == 32
  25635. +#define add_ssaaaa(sh, sl, ah, al, bh, bl) \
  25636. + __asm__ ("add\t%1, %4, %5\n\tadc\t%0, %2, %3" \
  25637. + : "=r" ((USItype) (sh)), \
  25638. + "=&r" ((USItype) (sl)) \
  25639. + : "r" ((USItype) (ah)), \
  25640. + "r" ((USItype) (bh)), \
  25641. + "r" ((USItype) (al)), \
  25642. + "r" ((USItype) (bl)) __CLOBBER_CC)
  25643. +#define sub_ddmmss(sh, sl, ah, al, bh, bl) \
  25644. + __asm__ ("sub\t%1, %4, %5\n\tsbc\t%0, %2, %3" \
  25645. + : "=r" ((USItype) (sh)), \
  25646. + "=&r" ((USItype) (sl)) \
  25647. + : "r" ((USItype) (ah)), \
  25648. + "r" ((USItype) (bh)), \
  25649. + "r" ((USItype) (al)), \
  25650. + "r" ((USItype) (bl)) __CLOBBER_CC)
  25651. +
  25652. +#if !defined (__AVR32_NO_MUL__)
  25653. +#define __umulsidi3(a,b) ((UDItype)(a) * (UDItype)(b))
  25654. +
  25655. +#define umul_ppmm(w1, w0, u, v) \
  25656. +{ \
  25657. + DWunion __w; \
  25658. + __w.ll = __umulsidi3 (u, v); \
  25659. + w1 = __w.s.high; \
  25660. + w0 = __w.s.low; \
  25661. +}
  25662. +#endif
  25663. +
  25664. +#define count_leading_zeros(COUNT,X) ((COUNT) = __builtin_clz (X))
  25665. +#define count_trailing_zeros(COUNT,X) ((COUNT) = __builtin_ctz (X))
  25666. +#define COUNT_LEADING_ZEROS_0 32
  25667. +#endif
  25668. +
  25669. #if defined (__CRIS__) && __CRIS_arch_version >= 3
  25670. #define count_leading_zeros(COUNT, X) ((COUNT) = __builtin_clz (X))
  25671. #if __CRIS_arch_version >= 8
  25672. diff -Nur gcc-4.4.6.orig/gcc/optabs.h gcc-4.4.6/gcc/optabs.h
  25673. --- gcc-4.4.6.orig/gcc/optabs.h 2008-08-07 09:35:51.000000000 +0200
  25674. +++ gcc-4.4.6/gcc/optabs.h 2011-10-22 19:23:08.556581301 +0200
  25675. @@ -603,7 +603,7 @@
  25676. extern optab code_to_optab[NUM_RTX_CODE + 1];
  25677. -typedef rtx (*rtxfun) (rtx);
  25678. +typedef rtx (*rtxfun) (rtx, ...);
  25679. /* Indexed by the rtx-code for a conditional (e.g. EQ, LT,...)
  25680. gives the gen_function to make a branch to test that condition. */
  25681. diff -Nur gcc-4.4.6.orig/gcc/regrename.c gcc-4.4.6/gcc/regrename.c
  25682. --- gcc-4.4.6.orig/gcc/regrename.c 2009-02-20 16:20:38.000000000 +0100
  25683. +++ gcc-4.4.6/gcc/regrename.c 2011-10-22 19:23:08.556581301 +0200
  25684. @@ -1582,6 +1582,9 @@
  25685. bool changed = false;
  25686. rtx insn;
  25687. + rtx prev_pred_test;
  25688. + int prev_pred_insn_skipped = 0;
  25689. +
  25690. for (insn = BB_HEAD (bb); ; insn = NEXT_INSN (insn))
  25691. {
  25692. int n_ops, i, alt, predicated;
  25693. @@ -1621,6 +1624,58 @@
  25694. recog_data.operand_type[i] = OP_INOUT;
  25695. }
  25696. +
  25697. + /* Added for targets (AVR32) which supports test operands to be modified
  25698. + in cond_exec instruction. For these targets we cannot make a change to
  25699. + the test operands if one of the test operands is an output operand This beacuse
  25700. + changing the test operands might cause the need for inserting a new test
  25701. + insns in the middle of a sequence of cond_exec insns and if the test operands
  25702. + are modified these tests will fail.
  25703. + */
  25704. + if ( IFCVT_ALLOW_MODIFY_TEST_IN_INSN
  25705. + && predicated )
  25706. + {
  25707. + int insn_skipped = 0;
  25708. + rtx test = COND_EXEC_TEST (PATTERN (insn));
  25709. +
  25710. + /* Check if the previous insn was a skipped predicated insn with the same
  25711. + test as this predicated insn. If so we cannot do any modification to
  25712. + this insn either since we cannot emit the test insn because the operands
  25713. + are clobbered. */
  25714. + if ( prev_pred_insn_skipped
  25715. + && (rtx_equal_p (test, prev_pred_test)
  25716. + || rtx_equal_p (test, reversed_condition (prev_pred_test))) )
  25717. + {
  25718. + insn_skipped = 1;
  25719. + }
  25720. + else
  25721. + {
  25722. + /* Check if the output operand is used in the test expression. */
  25723. + for (i = 0; i < n_ops; ++i)
  25724. + if ( recog_data.operand_type[i] == OP_INOUT
  25725. + && reg_mentioned_p (recog_data.operand[i], test) )
  25726. + {
  25727. + insn_skipped = 1;
  25728. + break;
  25729. + }
  25730. +
  25731. + }
  25732. +
  25733. + prev_pred_test = test;
  25734. + prev_pred_insn_skipped = insn_skipped;
  25735. + if ( insn_skipped )
  25736. + {
  25737. + if (insn == BB_END (bb))
  25738. + break;
  25739. + else
  25740. + continue;
  25741. + }
  25742. + }
  25743. + else
  25744. + {
  25745. + prev_pred_insn_skipped = 0;
  25746. + }
  25747. +
  25748. /* For each earlyclobber operand, zap the value data. */
  25749. for (i = 0; i < n_ops; i++)
  25750. if (recog_op_alt[i][alt].earlyclobber)
  25751. diff -Nur gcc-4.4.6.orig/gcc/sched-deps.c gcc-4.4.6/gcc/sched-deps.c
  25752. --- gcc-4.4.6.orig/gcc/sched-deps.c 2010-08-24 10:53:11.000000000 +0200
  25753. +++ gcc-4.4.6/gcc/sched-deps.c 2011-10-22 19:23:08.556581301 +0200
  25754. @@ -1473,7 +1473,14 @@
  25755. prev_nonnote = prev_nonnote_insn (insn);
  25756. if (BLOCK_FOR_INSN (insn) == BLOCK_FOR_INSN (prev_nonnote)
  25757. - && ! sched_insns_conditions_mutex_p (insn, prev_nonnote))
  25758. + /* Modification for AVR32 by RP: Why is this here, this will
  25759. + cause the instruction to be without any dependencies, which might
  25760. + cause it to be moved anywhere. For the AVR32 we try to keep
  25761. + a group of conditionals together even if they are mutual exclusive.
  25762. + */
  25763. + && (! sched_insns_conditions_mutex_p (insn, prev_nonnote)
  25764. + || GET_CODE (PATTERN (insn)) == COND_EXEC )
  25765. + )
  25766. add_dependence (insn, prev_nonnote, REG_DEP_ANTI);
  25767. }
  25768. @@ -2230,8 +2237,29 @@
  25769. if (code == COND_EXEC)
  25770. {
  25771. +#ifdef IFCVT_ALLOW_MODIFY_TEST_IN_INSN
  25772. + if (IFCVT_ALLOW_MODIFY_TEST_IN_INSN)
  25773. + {
  25774. + /* Check if we have a group of conditional instructions with the same test.
  25775. + If so we must make sure that they are not scheduled apart in order to
  25776. + avoid unnecessary tests and if one of the registers in the test is modified
  25777. + in the instruction this is needed to ensure correct code. */
  25778. + if ( prev_nonnote_insn (insn)
  25779. + && INSN_P (prev_nonnote_insn (insn))
  25780. + && GET_CODE (PATTERN (prev_nonnote_insn (insn))) == COND_EXEC
  25781. + && rtx_equal_p (XEXP(COND_EXEC_TEST (PATTERN (prev_nonnote_insn (insn))), 0), XEXP (COND_EXEC_TEST (x), 0))
  25782. + && rtx_equal_p (XEXP(COND_EXEC_TEST (PATTERN (prev_nonnote_insn (insn))), 1), XEXP (COND_EXEC_TEST (x), 1))
  25783. + && ( GET_CODE (COND_EXEC_TEST (PATTERN (prev_nonnote_insn (insn)))) == GET_CODE (COND_EXEC_TEST (x))
  25784. + || GET_CODE (COND_EXEC_TEST (PATTERN (prev_nonnote_insn (insn)))) == reversed_comparison_code (COND_EXEC_TEST (x), insn)))
  25785. + {
  25786. + SCHED_GROUP_P (insn) = 1;
  25787. + //CANT_MOVE (prev_nonnote_insn (insn)) = 1;
  25788. + }
  25789. + }
  25790. +#endif
  25791. sched_analyze_2 (deps, COND_EXEC_TEST (x), insn);
  25792. +
  25793. /* ??? Should be recording conditions so we reduce the number of
  25794. false dependencies. */
  25795. x = COND_EXEC_CODE (x);
  25796. diff -Nur gcc-4.4.6.orig/gcc/testsuite/gcc.dg/sibcall-3.c gcc-4.4.6/gcc/testsuite/gcc.dg/sibcall-3.c
  25797. --- gcc-4.4.6.orig/gcc/testsuite/gcc.dg/sibcall-3.c 2009-01-08 18:56:52.000000000 +0100
  25798. +++ gcc-4.4.6/gcc/testsuite/gcc.dg/sibcall-3.c 2011-10-22 19:23:08.556581301 +0200
  25799. @@ -5,7 +5,7 @@
  25800. Copyright (C) 2002 Free Software Foundation Inc.
  25801. Contributed by Hans-Peter Nilsson <hp@bitrange.com> */
  25802. -/* { dg-do run { xfail { { arc-*-* avr-*-* cris-*-* crisv32-*-* h8300-*-* hppa*64*-*-* m32r-*-* m68hc1?-*-* mcore-*-* mn10300-*-* xstormy16-*-* v850*-*-* vax-*-* xtensa*-*-* } || { arm*-*-* && { ! arm32 } } } } } */
  25803. +/* { dg-do run { xfail { { arc-*-* avr-*-* avr32-*-* cris-*-* crisv32-*-* h8300-*-* hppa*64*-*-* m32r-*-* m68hc1?-*-* mcore-*-* mn10300-*-* xstormy16-*-* v850*-*-* vax-*-* xtensa*-*-* } || { arm*-*-* && { ! arm32 } } } } } */
  25804. /* -mlongcall disables sibcall patterns. */
  25805. /* { dg-skip-if "" { powerpc*-*-* } { "-mlongcall" } { "" } } */
  25806. /* { dg-options "-O2 -foptimize-sibling-calls" } */
  25807. diff -Nur gcc-4.4.6.orig/gcc/testsuite/gcc.dg/sibcall-4.c gcc-4.4.6/gcc/testsuite/gcc.dg/sibcall-4.c
  25808. --- gcc-4.4.6.orig/gcc/testsuite/gcc.dg/sibcall-4.c 2009-01-08 18:56:52.000000000 +0100
  25809. +++ gcc-4.4.6/gcc/testsuite/gcc.dg/sibcall-4.c 2011-10-22 19:23:08.556581301 +0200
  25810. @@ -5,7 +5,7 @@
  25811. Copyright (C) 2002 Free Software Foundation Inc.
  25812. Contributed by Hans-Peter Nilsson <hp@bitrange.com> */
  25813. -/* { dg-do run { xfail { { arc-*-* avr-*-* cris-*-* crisv32-*-* h8300-*-* hppa*64*-*-* m32r-*-* m68hc1?-*-* mcore-*-* mn10300-*-* xstormy16-*-* v850*-*-* vax-*-* xtensa*-*-* } || { arm*-*-* && { ! arm32 } } } } } */
  25814. +/* { dg-do run { xfail { { arc-*-* avr-*-* avr32-*-* cris-*-* crisv32-*-* h8300-*-* hppa*64*-*-* m32r-*-* m68hc1?-*-* mcore-*-* mn10300-*-* xstormy16-*-* v850*-*-* vax-*-* xtensa*-*-* } || { arm*-*-* && { ! arm32 } } } } } */
  25815. /* -mlongcall disables sibcall patterns. */
  25816. /* { dg-skip-if "" { powerpc*-*-* } { "-mlongcall" } { "" } } */
  25817. /* { dg-options "-O2 -foptimize-sibling-calls" } */
  25818. diff -Nur gcc-4.4.6.orig/gcc/testsuite/gcc.dg/trampoline-1.c gcc-4.4.6/gcc/testsuite/gcc.dg/trampoline-1.c
  25819. --- gcc-4.4.6.orig/gcc/testsuite/gcc.dg/trampoline-1.c 2008-05-12 23:52:38.000000000 +0200
  25820. +++ gcc-4.4.6/gcc/testsuite/gcc.dg/trampoline-1.c 2011-10-22 19:23:08.556581301 +0200
  25821. @@ -47,6 +47,8 @@
  25822. int main (void)
  25823. {
  25824. +#ifndef NO_TRAMPOLINES
  25825. foo ();
  25826. +#endif
  25827. return 0;
  25828. }
  25829. diff -Nur gcc-4.4.6.orig/libgcc/config.host gcc-4.4.6/libgcc/config.host
  25830. --- gcc-4.4.6.orig/libgcc/config.host 2009-04-17 13:58:41.000000000 +0200
  25831. +++ gcc-4.4.6/libgcc/config.host 2011-10-22 19:23:08.556581301 +0200
  25832. @@ -218,6 +218,13 @@
  25833. ;;
  25834. arm-*-pe*)
  25835. ;;
  25836. +avr32-*-linux*)
  25837. + # No need to build crtbeginT.o on uClibc systems. Should probably be
  25838. + # moved to the OS specific section above.
  25839. + extra_parts="crtbegin.o crtbeginS.o crtend.o crtendS.o"
  25840. + ;;
  25841. +avr32-*-*)
  25842. + ;;
  25843. avr-*-rtems*)
  25844. ;;
  25845. avr-*-*)
  25846. diff -Nur gcc-4.4.6.orig/libstdc++-v3/Makefile.in gcc-4.4.6/libstdc++-v3/Makefile.in
  25847. --- gcc-4.4.6.orig/libstdc++-v3/Makefile.in 2010-04-29 17:03:38.000000000 +0200
  25848. +++ gcc-4.4.6/libstdc++-v3/Makefile.in 2011-10-22 19:23:08.556581301 +0200
  25849. @@ -36,6 +36,7 @@
  25850. build_triplet = @build@
  25851. host_triplet = @host@
  25852. target_triplet = @target@
  25853. +LIBOBJDIR =
  25854. DIST_COMMON = $(top_srcdir)/fragment.am $(srcdir)/../config.guess \
  25855. $(srcdir)/../config.sub README ChangeLog $(srcdir)/Makefile.in \
  25856. $(srcdir)/Makefile.am $(top_srcdir)/configure \
  25857. diff -Nur gcc-4.4.6.orig/libstdc++-v3/config/os/gnu-linux/ctype_base.h gcc-4.4.6/libstdc++-v3/config/os/gnu-linux/ctype_base.h
  25858. --- gcc-4.4.6.orig/libstdc++-v3/config/os/gnu-linux/ctype_base.h 2009-04-10 01:23:07.000000000 +0200
  25859. +++ gcc-4.4.6/libstdc++-v3/config/os/gnu-linux/ctype_base.h 2011-10-22 19:23:08.556581301 +0200
  25860. @@ -26,6 +26,8 @@
  25861. //
  25862. // ISO C++ 14882: 22.1 Locales
  25863. //
  25864. +#include <features.h>
  25865. +#include <ctype.h>
  25866. /** @file ctype_base.h
  25867. * This is an internal header file, included by other library headers.
  25868. @@ -40,7 +42,11 @@
  25869. struct ctype_base
  25870. {
  25871. // Non-standard typedefs.
  25872. +#ifdef __UCLIBC__
  25873. + typedef const __ctype_touplow_t* __to_type;
  25874. +#else
  25875. typedef const int* __to_type;
  25876. +#endif
  25877. // NB: Offsets into ctype<char>::_M_table force a particular size
  25878. // on the mask type. Because of this, we don't use an enum.
  25879. diff -Nur gcc-4.4.6.orig/libstdc++-v3/include/Makefile.in gcc-4.4.6/libstdc++-v3/include/Makefile.in
  25880. --- gcc-4.4.6.orig/libstdc++-v3/include/Makefile.in 2009-05-13 02:24:16.000000000 +0200
  25881. +++ gcc-4.4.6/libstdc++-v3/include/Makefile.in 2011-10-22 19:23:08.556581301 +0200
  25882. @@ -36,6 +36,7 @@
  25883. build_triplet = @build@
  25884. host_triplet = @host@
  25885. target_triplet = @target@
  25886. +LIBOBJDIR =
  25887. DIST_COMMON = $(srcdir)/Makefile.am $(srcdir)/Makefile.in \
  25888. $(top_srcdir)/fragment.am
  25889. subdir = include
  25890. diff -Nur gcc-4.4.6.orig/libstdc++-v3/libsupc++/Makefile.in gcc-4.4.6/libstdc++-v3/libsupc++/Makefile.in
  25891. --- gcc-4.4.6.orig/libstdc++-v3/libsupc++/Makefile.in 2009-01-15 21:02:11.000000000 +0100
  25892. +++ gcc-4.4.6/libstdc++-v3/libsupc++/Makefile.in 2011-10-22 19:23:08.556581301 +0200
  25893. @@ -38,6 +38,7 @@
  25894. build_triplet = @build@
  25895. host_triplet = @host@
  25896. target_triplet = @target@
  25897. +LIBOBJDIR =
  25898. DIST_COMMON = $(glibcxxinstall_HEADERS) $(srcdir)/Makefile.am \
  25899. $(srcdir)/Makefile.in $(top_srcdir)/fragment.am
  25900. subdir = libsupc++
  25901. diff -Nur gcc-4.4.6.orig/libstdc++-v3/po/Makefile.in gcc-4.4.6/libstdc++-v3/po/Makefile.in
  25902. --- gcc-4.4.6.orig/libstdc++-v3/po/Makefile.in 2009-01-15 21:02:11.000000000 +0100
  25903. +++ gcc-4.4.6/libstdc++-v3/po/Makefile.in 2011-10-22 19:23:08.556581301 +0200
  25904. @@ -36,6 +36,7 @@
  25905. build_triplet = @build@
  25906. host_triplet = @host@
  25907. target_triplet = @target@
  25908. +LIBOBJDIR =
  25909. DIST_COMMON = $(srcdir)/Makefile.am $(srcdir)/Makefile.in \
  25910. $(top_srcdir)/fragment.am
  25911. subdir = po
  25912. diff -Nur gcc-4.4.6.orig/libstdc++-v3/src/Makefile.in gcc-4.4.6/libstdc++-v3/src/Makefile.in
  25913. --- gcc-4.4.6.orig/libstdc++-v3/src/Makefile.in 2009-08-26 21:04:11.000000000 +0200
  25914. +++ gcc-4.4.6/libstdc++-v3/src/Makefile.in 2011-10-22 19:23:08.556581301 +0200
  25915. @@ -37,6 +37,7 @@
  25916. build_triplet = @build@
  25917. host_triplet = @host@
  25918. target_triplet = @target@
  25919. +LIBOBJDIR =
  25920. DIST_COMMON = $(srcdir)/Makefile.am $(srcdir)/Makefile.in \
  25921. $(top_srcdir)/fragment.am
  25922. subdir = src