@@ -1,4 +1,7 @@
-import { BedrockRuntimeClient, InvokeModelCommand } from "@aws-sdk/client-bedrock-runtime";
+import {
+  BedrockRuntimeClient,
+  InvokeModelCommand,
+} from "@aws-sdk/client-bedrock-runtime";
 import { Buffer } from "buffer";
 window.Buffer = Buffer;
 
@@ -372,9 +375,16 @@ async function sendPromptToAzureOpenAI(prompt, len, apiKey, apiUrl, aiModel) {
 }
 
 async function sendPromptToAmazon(prompt, len) {
-  const accessKeyId = document.getElementById('aws-access-key').value || localStorage.getItem('aws-access-key');
-  const secretAccessKey = document.getElementById('aws-secret-key').value || localStorage.getItem('aws-secret-key');
-  const region = document.getElementById('aws-region').value || localStorage.getItem('aws-region') || 'us-east-1';
+  const accessKeyId =
+    document.getElementById("aws-access-key").value ||
+    localStorage.getItem("aws-access-key");
+  const secretAccessKey =
+    document.getElementById("aws-secret-key").value ||
+    localStorage.getItem("aws-secret-key");
+  const region =
+    document.getElementById("aws-region").value ||
+    localStorage.getItem("aws-region") ||
+    "us-east-1";
 
   // Format the prompt
   const formattedPrompt = `Human: ${prompt}\nAssistant:`;
@@ -391,16 +401,24 @@ async function sendPromptToAmazon(prompt, len) {
     credentials: credentials,
   });
 
-  // Prepare the InvokeModelCommand
-  const params = {
-    modelId: 'anthropic.claude-v2',
-    accept: 'application/json',
-    contentType: 'application/json',
-    body: JSON.stringify({
-      prompt: formattedPrompt,
-      max_tokens_to_sample: 300,
-    }),
-  };
+  const params = {
+    "modelId": "us.anthropic.claude-3-5-sonnet-20241022-v2:0",
+    "body": JSON.stringify({
+      "anthropic_version": "bedrock-2023-05-31",
+      "max_tokens": 1024,
+      "messages": [
+        {
+          "role": "user",
+          "content": [
+            {
+              "type": "text",
+              "text": prompt
+            }
+          ]
+        }
+      ]
+    })
+  }
 
   try {
     const command = new InvokeModelCommand(params);
@@ -410,15 +428,15 @@ async function sendPromptToAmazon(prompt, len) {
     const responseBlob = new Blob([response.body]);
     const responseText = await responseBlob.text();
     const parsedResponse = JSON.parse(responseText);
+    const responseContents = parsedResponse.content[0].text;
 
-    return parsedResponse.completion.trim();
+    return responseContents.trim();
   } catch (err) {
     console.error(err);
     return `# Error: ${err.message}`;
   }
 }
 
-
 async function sendPromptToOllama(prompt, len, model, ipAddr, portNum) {
   const url = `http://${ipAddr}:${portNum}/api/chat`;
   const headers = { "Content-Type": "application/json" };
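For reference, the Bedrock hunks above switch the request body to the Anthropic Messages API schema and read the reply from content[0].text. Below is a minimal standalone sketch of that round trip, assuming valid IAM credentials and the same inference profile ID as in the patch; the placeholder credential strings and the TextDecoder-based decoding (instead of the Blob round-trip) are illustrative only, not part of the commit.

import {
  BedrockRuntimeClient,
  InvokeModelCommand,
} from "@aws-sdk/client-bedrock-runtime";

// Sketch only: credentials and region are placeholders; the patch reads them
// from the form fields or localStorage instead.
async function invokeClaudeOnBedrock(prompt) {
  const client = new BedrockRuntimeClient({
    region: "us-east-1",
    credentials: { accessKeyId: "AKIA...", secretAccessKey: "..." },
  });
  const command = new InvokeModelCommand({
    modelId: "us.anthropic.claude-3-5-sonnet-20241022-v2:0",
    contentType: "application/json",
    accept: "application/json",
    body: JSON.stringify({
      anthropic_version: "bedrock-2023-05-31",
      max_tokens: 1024,
      messages: [{ role: "user", content: [{ type: "text", text: prompt }] }],
    }),
  });
  const response = await client.send(command);
  // response.body is a byte array; decode it and read the first content block.
  const parsed = JSON.parse(new TextDecoder().decode(response.body));
  return parsed.content[0].text.trim();
}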
@@ -620,12 +638,15 @@ async function optimizeCode(imports, code, line, context) {
         return result;
       }
     }
-    case "amazon": {
+    case "amazon": {
       console.log("Running " + document.getElementById("service-select").value);
       console.log(prompt);
-      const result = await sendPromptToAmazon(prompt, Math.max(numWords * 4, 500));
+      const result = await sendPromptToAmazon(
+        prompt,
+        Math.max(numWords * 4, 500),
+      );
       return extractCode(result);
-    }
+    }
     case "azure-openai": {
       console.log("Running " + document.getElementById("service-select").value);
       console.log(prompt);
@@ -744,7 +765,9 @@ function proposeOptimization(filename, file_number, line, params) {
       .join("<br />");
     // Display the proposed optimization, with click-to-copy functionality.
     elt.innerHTML = `<hr><span title="click to copy" style="cursor: copy" id="opt-${file_number}-${line.lineno}">${formattedCode}</span>`;
-    const thisElt = document.getElementById(`opt-${file_number}-${line.lineno}`);
+    const thisElt = document.getElementById(
+      `opt-${file_number}-${line.lineno}`,
+    );
     thisElt.addEventListener("click", async (e) => {
       await copyOnClick(e, message);
       // After copying, briefly change the cursor back to the default to provide some visual feedback..
@@ -795,9 +818,16 @@ function time_consumed_str(time_in_ms) {
 }
 
 function makeTooltip(title, value) {
-  // Tooltip for time bars, below
-  let secs = value / 100 * globalThis.profile.elapsed_time_sec;
-  return `(${title}) ` + value.toFixed(1) + "%" + " [" + time_consumed_str(secs * 1e3) + "]"
+  // Tooltip for time bars, below
+  let secs = (value / 100) * globalThis.profile.elapsed_time_sec;
+  return (
+    `(${title}) ` +
+    value.toFixed(1) +
+    "%" +
+    " [" +
+    time_consumed_str(secs * 1e3) +
+    "]"
+  );
 }
 
 function makeBar(python, native, system, params) {
@@ -900,7 +930,6 @@ function makeBar(python, native, system, params) {
   };
 }
 
-
 function makeGPUPie(util, gpu_device, params) {
   return {
     $schema: "https://vega.github.io/schema/vega-lite/v5.json",
@@ -959,11 +988,11 @@ function makeGPUBar(util, gpu_device, params) {
     data: {
       values: [
         {
-      x: 0,
-      y: util.toFixed(0),
-      q: (util / 2).toFixed(0),
-      d: util >= 20 ? util.toFixed(0) + "%" : "",
-      dd: "in use: " + util.toFixed(0) + "%",
+          x: 0,
+          y: util.toFixed(0),
+          q: (util / 2).toFixed(0),
+          d: util >= 20 ? util.toFixed(0) + "%" : "",
+          dd: "in use: " + util.toFixed(0) + "%",
         },
       ],
     },
@@ -981,17 +1010,17 @@ function makeGPUBar(util, gpu_device, params) {
             field: "dd",
             type: "nominal",
             legend: false,
-            scale: { range: ["goldenrod", "#f4e6c2"] },
+            scale: { range: ["goldenrod", "#f4e6c2"] },
           },
-          tooltip: [{ field: "dd", type: "nominal", title: gpu_device + ":" }],
+          tooltip: [{ field: "dd", type: "nominal", title: gpu_device + ":" }],
         },
       },
       {
         mark: {
           type: "text",
           align: "center",
           baseline: "middle",
-          dx: 0,
+          dx: 0,
         },
         encoding: {
           x: {
@@ -1001,7 +1030,7 @@ function makeGPUBar(util, gpu_device, params) {
           },
           text: { field: "d" },
           color: { value: "white" },
-          tooltip: [{ field: "dd", type: "nominal", title: gpu_device + ":" }],
+          tooltip: [{ field: "dd", type: "nominal", title: gpu_device + ":" }],
         },
       },
     ],
@@ -1292,13 +1321,13 @@ function makeTableHeader(fname, gpu, gpu_device, memory, params) {
       title: [gpu_device, "util."],
       color: CopyColor,
       width: 0,
-      info: `% utilization of ${gpu_device} by line / function (may be inaccurate if ${gpu_device} is not dedicated)`,
+      info: `% utilization of ${gpu_device} by line / function (may be inaccurate if ${gpu_device} is not dedicated)`,
     });
     columns.push({
       title: [gpu_device, "memory"],
       color: CopyColor,
       width: 0,
-      info: `Peak ${gpu_device} memory allocated by line / function (may be inaccurate if ${gpu_device} is not dedicated)`,
+      info: `Peak ${gpu_device} memory allocated by line / function (may be inaccurate if ${gpu_device} is not dedicated)`,
     });
   }
   columns.push({ title: ["", ""], color: "black", width: 100 });
@@ -1456,7 +1485,7 @@ function makeProfileLine(
         line.n_cpu_percent_python,
         line.n_cpu_percent_c,
         line.n_sys_percent,
-        { height: 20, width: 100 },
+        { height: 20, width: 100 },
       ),
     );
   } else {
@@ -1557,20 +1586,25 @@ function makeProfileLine(
       s += `<td style="width: 50; vertical-align: middle" align="right" data-sort="${line.n_gpu_percent}">`;
       s += `<span style="height: 20; width: 30; vertical-align: middle" id="gpu_pie${gpu_pies.length}"></span>`;
       s += "</td>";
-      gpu_pies.push(makeGPUPie(line.n_gpu_percent, prof.gpu_device, { height: 20, width: 100 }));
+      gpu_pies.push(
+        makeGPUPie(line.n_gpu_percent, prof.gpu_device, {
+          height: 20,
+          width: 100,
+        }),
+      );
       // gpu_pies.push(makeGPUBar(line.n_gpu_percent, prof.gpu_device, { height: 20, width: 100 }));
     }
     if (true) {
       if (line.n_gpu_peak_memory_mb < 1.0 || line.n_gpu_percent < 1.0) {
         s += '<td style="width: 100"></td>';
       } else {
-      let mem = line.n_gpu_peak_memory_mb;
-      let memStr = "MB";
-      if (mem >= 1024) {
-        mem /= 1024;
-        memStr = "GB";
-      }
-      s += `<td style="width: 100; vertical-align: middle" align="right"><font style="font-size: small" color="${CopyColor}">${mem.toFixed(0)}${memStr}</font></td>`;
+        let mem = line.n_gpu_peak_memory_mb;
+        let memStr = "MB";
+        if (mem >= 1024) {
+          mem /= 1024;
+          memStr = "GB";
+        }
+        s += `<td style="width: 100; vertical-align: middle" align="right"><font style="font-size: small" color="${CopyColor}">${mem.toFixed(0)}${memStr}</font></td>`;
       }
     }
   }
@@ -1716,8 +1750,9 @@ async function display(prof) {
   }
 
   // Restore the old GPU toggle from local storage (if any).
-  const gpu_checkbox = document.getElementById("use-gpu-checkbox") || '';
-  const old_gpu_checkbox = window.localStorage.getItem("use-gpu-checkbox") || '';
+  const gpu_checkbox = document.getElementById("use-gpu-checkbox") || "";
+  const old_gpu_checkbox =
+    window.localStorage.getItem("use-gpu-checkbox") || "";
   if (old_gpu_checkbox) {
     if (gpu_checkbox.checked.toString() != old_gpu_checkbox) {
       gpu_checkbox.click();
@@ -1914,7 +1949,9 @@ async function display(prof) {
     s += `<div style="${displayStr}" id="profile-${id}">`;
     s += `<table class="profile table table-hover table-condensed" id="table-${tableID}">`;
     tableID++;
-    s += makeTableHeader(ff[0], prof.gpu, prof.gpu_device, prof.memory, { functions: false });
+    s += makeTableHeader(ff[0], prof.gpu, prof.gpu_device, prof.memory, {
+      functions: false,
+    });
     s += "<tbody>";
     // Print per-line profiles.
     let prevLineno = -1;
@@ -1956,7 +1993,9 @@ async function display(prof) {
     // Print out function summaries.
     if (prof.files[ff[0]].functions.length) {
       s += `<table class="profile table table-hover table-condensed" id="table-${tableID}">`;
-      s += makeTableHeader(ff[0], prof.gpu, prof.gpu_device, prof.memory, { functions: true });
+      s += makeTableHeader(ff[0], prof.gpu, prof.gpu_device, prof.memory, {
+        functions: true,
+      });
       s += "<tbody>";
       tableID++;
       for (const l in prof.files[ff[0]].functions) {
@@ -2183,7 +2222,7 @@ function replaceDivWithSelect() {
     } else {
       console.error('Div with ID "language-local-models" not found.');
     }
-  // atLeastOneModel = true;
+    // atLeastOneModel = true;
   });
 }
 
@@ -2272,7 +2311,7 @@ function sendHeartbeat() {
 }
 
 window.addEventListener("load", () => {
-    load(profile);
+  load(profile);
 });
 
 setInterval(sendHeartbeat, 10000); // Send heartbeat every 10 seconds