aiom_pack
Framework for interdependent (MCMC-like) behavioral experiments
// What kind of stimuli do you want to send to the front-end?
// If they are plain coordinates, use 'raw'; otherwise, use one of the converters below or add your own.
// 'raw' keeps the original stimuli unchanged and is useful for local testing.
const axios = require('axios');
const fs = require('fs');
const path = require('path');
// const lower_bound = Number(process.env.lower_bound);
// const upper_bound = Number(process.env.upper_bound);
// Pass the stimulus vector through to the front-end unchanged.
async function raw(array) {
  return array;
}
// Wrap out-of-range values back into [min, max] (modular wrap-around):
// values below min wrap down from max, values above max wrap up from min.
function limit_array_in_range(array, min, max) {
  return array.map((val) => {
    if (val < min) {
      const remainder = Math.abs(val - min) % (max - min);
      return max - remainder;
    }
    if (val > max) {
      const remainder = Math.abs(val - max) % (max - min);
      return min + remainder;
    }
    return val;
  });
}
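// Worked example for limit_array_in_range (illustration only): with min = 0 and max = 10,
//   limit_array_in_range([-1, 5, 12], 0, 10)  ->  [9, 5, 2]
// (-1 wraps up to 9, 5 is already in range, 12 wraps down to 2).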
// Turn a stimulus vector into an image by POSTing it to the external image-generation
// service at process.env.imageurl, using its /generate endpoint.
function to_image(array) {
  // const rearray = process.env.experiment.includes("GSP") ? array:limit_array_in_range(array);
  const url = process.env.imageurl + '/generate';
  return axios.post(url, {
    vector: array,
  }, {
    headers: {
      'accept': 'application/json',
      'Content-Type': 'application/json',
    },
    responseType: 'json',
  })
    .then(response => {
      return {
        image: `data:image/png;base64,${response.data.image}`,
        posterior: response.data.pred_label,
      };
    })
    .catch((error) => {
      console.error('Error:', error);
    });
}
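// Usage sketch for to_image (assumes an image service at process.env.imageurl whose
// /generate endpoint accepts {vector} and responds with {image, pred_label};
// the vector below is a placeholder):
//   const {image, posterior} = await to_image([0.1, -0.3, 2.2]);
//   // image     -> "data:image/png;base64,..." ready to use as an <img> src
//   // posterior -> the service's predicted label for the generated stimulus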
// Batch variant (used for GSP-style trials): POSTs the payload to /generate_batch
// and resolves to an array of base64-encoded PNG data URLs.
function to_image_gsp(obj) {
  const url = process.env.imageurl + '/generate_batch';
  return axios.post(url, {
    vector: obj,
  }, {
    headers: {
      'accept': 'application/json',
      'Content-Type': 'application/json',
    },
    responseType: 'json',
  })
    .then(response => {
      return response.data.images.map(img => `data:image/png;base64,${img}`);
    })
    .catch((error) => {
      console.error('Error:', error);
    });
}
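// Usage sketch for to_image_gsp (assumes /generate_batch takes the batch under the
// `vector` key and responds with {images: [...]}; the batch below is a placeholder):
//   const dataUrls = await to_image_gsp([[0.1, 0.2], [0.3, 0.4]]);
//   // dataUrls -> ["data:image/png;base64,...", "data:image/png;base64,..."]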
// Read an image file from disk and return it as a base64-encoded PNG data URL.
function grab_image(path_img) {
  const imageData = fs.readFileSync(path_img);
  const base64 = Buffer.from(imageData).toString('base64');
  return `data:image/png;base64,${base64}`;
}
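// Usage sketch for grab_image ('./stimuli/example.png' is a placeholder path):
//   const dataUrl = grab_image('./stimuli/example.png');
//   // dataUrl -> "data:image/png;base64,..." suitable for an <img> src on the front-end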
// Pick a random attention-check stimulus pair for the given class.
// Directories under attentionDir are expected to be named '<s1>_<s2>' and to contain
// the stimulus files '<s1>.<ext>' and '<s2>.<ext>'.
function get_attention_stimuli_path(attentionDir, current_class) {
  const dirlist = fs.readdirSync(attentionDir);
  const matchingDirs = dirlist.filter(dir => dir.includes(current_class));
  if (matchingDirs.length === 0) {
    throw new Error(`No attention check directory found for class: ${current_class}`);
  }
  // Choose one matching directory at random.
  const attention_check_dir = matchingDirs[Math.floor(Math.random() * matchingDirs.length)];
  const [s1, s2] = attention_check_dir.split('_');
  const example_path = path.join(attentionDir, attention_check_dir);
  // All files in the directory share one extension; take it from the first file found.
  const exampleFiles = fs.readdirSync(example_path);
  const extension = exampleFiles[0].split('.').pop();
  const attention_stimulus_1 = path.join(attentionDir, attention_check_dir, s1 + '.' + extension);
  const attention_stimulus_2 = path.join(attentionDir, attention_check_dir, s2 + '.' + extension);
  return [attention_stimulus_1, attention_stimulus_2, [s1, s2]];
}
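// Usage sketch for get_attention_stimuli_path (directory and class names below are
// placeholders; assumes a layout like ./attention/catA_catB/{catA,catB}.png):
//   const [img1, img2, [label1, label2]] = get_attention_stimuli_path('./attention', 'catA');
//   // img1/img2 are file paths; pass them to grab_image() if the front-end needs data URLs.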
module.exports = {raw, to_image, to_image_gsp, grab_image, get_attention_stimuli_path, limit_array_in_range};