Uploading IDF files directly to Custom Energy Sim

Hi @mostapha and team,

I am trying to develop a workflow using the API to upload a large set of IDF files to a folder and then run them using the “Custom Energy Simulation” recipe.

Using the code below I was able to upload the IDF files to the a Folder within a project. But when I create the study there are 0 runs. The code is provided below. The next step is to figure out how to download specific result outputs from the SQL.

Thanks,
Cameron

import os
import pathlib
import time

from pollination_streamlit.api.client import ApiClient
from pollination_streamlit.interactors import NewJob, Recipe
from queenbee.job.job import JobStatusEnum

owner='adaptis-group'
project='test'
api_key = "API Key"
assert api_key is not None, 'You must provide valid Pollination API key.'
api_client = ApiClient(api_token=api_key)
recipe = Recipe('ladybug-tools','custom-energy-sim','0.3.12', client=api_client)
recipe_inputs = {
    'additional-idf': None,
    'ddy': r"C:\Users\camer\Documents\Test IDF files\CAN_ON_Sault.Ste.Marie.AP.712600_CWEC2016.ddy",
    'epw': r"C:\Users\camer\Documents\Test IDF files\CAN_ON_Sault.Ste.Marie.AP.712600_CWEC2016.epw",
    'measures':None,
    'model':None,
    'sim-par':None
}

new_study = NewJob(owner, project, recipe, client=api_client)

new_study.name = 'Test Python Simulation'

root_folder = r"C:\Users\camer\Documents\Test IDF files"  # path to the folder with all the HBJSON files

study_inputs = []

for model in pathlib.Path(root_folder).glob('*.idf'):
    inputs = dict(recipe_inputs)  # create a copy of the recipe
    uploaded_path = new_study.upload_artifact(model, target_folder="dataset1")
    inputs['model'] = uploaded_path
    inputs['model_id'] = model.stem  # I'm using the file name as the id.
    study_inputs.append(inputs)

# add the inputs to the study
# each set of inputs create a new run
new_study.arguments = study_inputs

# create the study
running_study = new_study.create()

Hi, @cameron-lawrence - from what I can see the path to ddy and epw are set to a local path. That won’t work.

You have to either upload these files to the project manually and then provide the relative path, or upload them as part of the code similar to how you are uploading the idf files.

Assuming that you upload the files into the root folder of the project. This is how the recipe inputs will look like.

recipe_inputs = {
    'additional-idf': None,
    'ddy': "CAN_ON_Sault.Ste.Marie.AP.712600_CWEC2016.ddy",
    'epw': "CAN_ON_Sault.Ste.Marie.AP.712600_CWEC2016.epw",
    'measures': None,
    'model': None,
    'sim-par': None
}

The rest of the code looks fine to me as-is.

Thanks @mostapha I made that change.

Still I’m getting no runs in my study. The files are uploaded as expected in the “dataset1” folder

import os
import pathlib
import time

from pollination_streamlit.api.client import ApiClient
from pollination_streamlit.interactors import NewJob, Recipe
from queenbee.job.job import JobStatusEnum

owner='adaptis-group'
project='test'
api_key = "API-Key"
assert api_key is not None, 'You must provide valid Pollination API key.'
api_client = ApiClient(api_token=api_key)
recipe = Recipe('ladybug-tools','custom-energy-sim','0.3.12', client=api_client)
recipe_inputs = {
    'additional-idf': None,
    'ddy': "dataset1/CAN_ON_Sault.Ste.Marie.AP.712600_CWEC2016.ddy",
    'epw': "dataset1/CAN_ON_Sault.Ste.Marie.AP.712600_CWEC2016.epw",
    'measures':None,
    'model':None,
    'sim-par':None
}

new_study = NewJob(owner, project, recipe, client=api_client)

new_study.name = 'Test Python Simulation'

root_folder = r"C:\Users\camer\Documents\Test IDF files"  # path to the folder with all the HBJSON files


ddy_file=pathlib.Path(r"C:\Users\camer\Documents\Test IDF files\CAN_ON_Sault.Ste.Marie.AP.712600_CWEC2016.ddy")
epw_file=pathlib.Path(r"C:\Users\camer\Documents\Test IDF files\CAN_ON_Sault.Ste.Marie.AP.712600_CWEC2016.epw")


new_study.upload_artifact(epw_file,target_folder='dataset1')
new_study.upload_artifact(ddy_file,target_folder='dataset1')

study_inputs = []

for model in pathlib.Path(root_folder).glob('*.idf'):
    inputs = dict(recipe_inputs)  # create a copy of the recipe
    uploaded_path = new_study.upload_artifact(model, target_folder="dataset1")  
    inputs['model'] = uploaded_path
    inputs['model_id'] = model.stem  # I'm using the file name as the id.
    study_inputs.append(inputs)

# add the inputs to the study
# each set of inputs create a new run
new_study.arguments = study_inputs

# create the study
running_study = new_study.create()

Hi, @cameron-lawrence, let me put a working example together and share it here. From what I see, your runs are not being submitted to the Pollination server.

Hi @cameron-lawrence - your code is running fine on my end. I can submit the runs with no issues:

I tested a few mistakes to see if I can recreate your issues. I wonder if it is happening because you passing None to string inputs. Can you try the code below and see if it works?


import os
import pathlib
import time

from pollination_streamlit.api.client import ApiClient
from pollination_streamlit.interactors import NewJob, Recipe
from queenbee.job.job import JobStatusEnum

owner='adaptis-group'
project='test'
api_key = "API-Key"
assert api_key is not None, 'You must provide valid Pollination API key.'
api_client = ApiClient(api_token=api_key)
recipe = Recipe('ladybug-tools','custom-energy-sim','0.3.12', client=api_client)

new_study = NewJob(owner, project, recipe, client=api_client)

new_study.name = 'Test Python Simulation'

root_folder = r"C:\Users\camer\Documents\Test IDF files"  # path to the folder with all the IDF files


ddy_file = pathlib.Path(r"C:\Users\camer\Documents\Test IDF files\CAN_ON_Sault.Ste.Marie.AP.712600_CWEC2016.ddy")
epw_file = pathlib.Path(r"C:\Users\camer\Documents\Test IDF files\CAN_ON_Sault.Ste.Marie.AP.712600_CWEC2016.epw")

upload_folder = 'dataset1'
ddy_file_path = new_study.upload_artifact(epw_file, target_folder=upload_folder)
epw_file_path = new_study.upload_artifact(ddy_file, target_folder=upload_folder)

recipe_inputs = {
    'ddy': ddy_file_path,
    'epw': epw_file_path
}

study_inputs = []

for model in pathlib.Path(root_folder).glob('*.idf'):
    inputs = dict(recipe_inputs)  # create a copy of the recipe
    uploaded_path = new_study.upload_artifact(model, target_folder="dataset1")  
    inputs['model'] = uploaded_path
    inputs['model_id'] = model.stem  # I'm using the file name as the id.
    study_inputs.append(inputs)

# add the inputs to the study
# each set of inputs creates a new run
new_study.arguments = study_inputs

# create the study
running_study = new_study.create()

Hi @mostapha thanks for looking into this. I think the issue must have been because of using None for the string inputs. Looks like its working now!

1 Like

A post was split to a new topic: Use a zipped folder as input