Add mutable datastores that can be linked to multiple functions
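A shared environment is a named, mutable JSON datastore that can be linked to any number of HTTP functions. Before each invocation it is injected into the function's environment under its own name; after the invocation, whatever the function left under that key is written back, so every linked function sees the update. As an illustration only (the names 'team_counters' and 'greeting' are hypothetical), the environment sent to the runtime ends up shaped like this:

# Hypothetical shape of the payload built below as combined_environment.
# 'greeting' belongs to the function itself; 'team_counters' is a shared
# environment linked via http_function_shared_envs, namespaced under its name.
combined_environment = {
    'greeting': 'hello',                  # function-specific, stays with this function
    'team_counters': {'invocations': 3},  # shared, mutations are persisted for all linked functions
}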
app.py (43 changed lines)
@@ -18,6 +18,7 @@ from routes.llm import llm
 from routes.auth import auth
 from routes.settings import settings
 from routes.community import community
+from routes.shared_env import shared_env
 from constants import DEFAULT_FUNCTION_NAME, DEFAULT_SCRIPT, DEFAULT_ENVIRONMENT
 from flask_apscheduler import APScheduler
 import asyncio
@@ -47,6 +48,7 @@ app.register_blueprint(llm, url_prefix='/llm')
 app.register_blueprint(auth, url_prefix='/auth')
 app.register_blueprint(settings, url_prefix='/settings')
 app.register_blueprint(community, url_prefix='/community')
+app.register_blueprint(shared_env, url_prefix='/shared_env')

 # Switch to inter app routing, which results in speed up from ~400ms to ~270ms
 # https://stackoverflow.com/questions/76886643/linking-two-not-exposed-dokku-apps
@@ -212,6 +214,24 @@ async def execute_http_function(user_id, function):
     if request.data and not request.is_json:
         request_data['text'] = request.data.decode('utf-8')

+    # Load and inject shared environments (namespaced)
+    shared_envs = db.execute('''
+        SELECT se.id, se.name, se.environment
+        FROM http_function_shared_envs hfse
+        JOIN shared_environments se ON hfse.shared_env_id = se.id
+        WHERE hfse.http_function_id = %s
+        ORDER BY se.name
+    ''', [http_function['id']])
+
+    # Inject shared environments as nested objects
+    combined_environment = environment.copy()
+    shared_env_map = {}  # Track shared env IDs for later extraction
+    if shared_envs:
+        for se in shared_envs:
+            env_data = json.loads(se['environment']) if isinstance(se['environment'], str) else se['environment']
+            combined_environment[se['name']] = env_data
+            shared_env_map[se['name']] = se['id']
+
     # Call the Node.js API asynchronously
     if runtime == 'deno':
         api_url = DENO_API_URL
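The query above joins two tables that this hunk assumes already exist; their migration is not part of the diff. Only the table and column names are taken from the SQL - the column types, defaults, and the use of db.execute for DDL below are assumptions (the %s placeholders and NOW() suggest Postgres). A minimal sketch of that assumed schema:

# Assumed schema sketch - not part of this commit; types and defaults are guesses.
db.execute('''
    CREATE TABLE IF NOT EXISTS shared_environments (
        id SERIAL PRIMARY KEY,
        name TEXT NOT NULL,
        environment JSONB NOT NULL DEFAULT '{}',
        updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
    )
''', [], commit=True)

db.execute('''
    CREATE TABLE IF NOT EXISTS http_function_shared_envs (
        http_function_id INTEGER NOT NULL,
        shared_env_id INTEGER NOT NULL REFERENCES shared_environments(id),
        PRIMARY KEY (http_function_id, shared_env_id)
    )
''', [], commit=True)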
@@ -220,10 +240,29 @@ async def execute_http_function(user_id, function):
     else:
         api_url = NODE_API_URL
     async with aiohttp.ClientSession() as session:
-        async with session.post(api_url, json={'code': code, 'request': request_data, 'environment': environment, 'name': function_name}) as response:
+        async with session.post(api_url, json={'code': code, 'request': request_data, 'environment': combined_environment, 'name': function_name}) as response:
             response_data = await response.json()

-    db.update_http_function_environment_info_and_invoked_count(user_id, function_name, response_data['environment'])
+    # Extract and persist shared environment mutations
+    returned_env = response_data['environment']
+    function_specific_env = {}
+
+    # Separate function-specific properties from shared environments
+    for key, value in returned_env.items():
+        if key in shared_env_map:
+            # This is a shared environment - save it back
+            db.execute(
+                'UPDATE shared_environments SET environment=%s, updated_at=NOW() WHERE id=%s',
+                [json.dumps(value), shared_env_map[key]],
+                commit=True
+            )
+        else:
+            # This is function-specific - keep it
+            function_specific_env[key] = value
+
+    # Update function's own environment (without shared envs)
+    db.update_http_function_environment_info_and_invoked_count(user_id, function_name, function_specific_env)
+
     db.add_http_function_invocation(
         http_function['id'],
         response_data['status'],
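For clarity, here is the write-back split from the last hunk as a standalone, runnable sketch with a made-up payload; the key names and IDs are hypothetical, and the real code persists the two halves through db.execute and db.update_http_function_environment_info_and_invoked_count rather than returning them:

import json

def split_environment(returned_env, shared_env_map):
    # Mirrors the loop above: keys that name a linked shared environment are
    # collected for write-back to shared_environments; everything else stays
    # with the function's own environment.
    shared_updates = {}
    function_specific_env = {}
    for key, value in returned_env.items():
        if key in shared_env_map:
            shared_updates[shared_env_map[key]] = json.dumps(value)
        else:
            function_specific_env[key] = value
    return shared_updates, function_specific_env

returned_env = {'team_counters': {'invocations': 4}, 'greeting': 'hello again'}
shared_env_map = {'team_counters': 7}  # name -> shared_environments.id, built before the invocation
print(split_environment(returned_env, shared_env_map))
# ({7: '{"invocations": 4}'}, {'greeting': 'hello again'})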