...
This commit is contained in:
parent
4bd960ed05
commit
7fabb4163a
195
_archive/aiprompts/ask.py
Normal file
195
_archive/aiprompts/ask.py
Normal file
@ -0,0 +1,195 @@
|
||||
import os
|
||||
import json
|
||||
import enum
|
||||
import textwrap
|
||||
from typing import List, Optional
|
||||
import logging
|
||||
from termcolor import colored
|
||||
|
||||
import ollama
|
||||
import openai
|
||||
from openai import OpenAI
|
||||
from ai.instruction import instructions_load, instructions_get, instructions_reset
|
||||
|
||||
# Set up logging: INFO level, message-only format (coloring is done per-call
# with termcolor, so the format string stays bare).
logging.basicConfig(level=logging.INFO, format='%(message)s')
logger = logging.getLogger(__name__)
|
||||
|
||||
class Model(enum.Enum):
    """Model identifiers selectable on AIAssistant.

    Each value is the provider-specific model name string.  QWEN72I and
    MIXTRAL7I are routed to DeepInfra, GPT35/GPT4/GPT4O to OpenAI, and the
    remaining tags to a local Ollama instance (see AIAssistant.ask).
    """
    QWEN72I = "Qwen/Qwen2-72B-Instruct"
    MIXTRAL7I = "mistralai/Mixtral-8x7B-Instruct-v0.1"
    PHI3_MEDIUM = "phi3:medium-128k"
    PHI3_MINI = "phi3:mini"
    GPT35 = "gpt-3.5-turbo"
    GPT4 = "gpt-4"
    GPT4O = "gpt-4o"
    QWEN1L= "qwen2:1.5b" #local
    QWEN0L= "qwen2:0.5b" #local
    PHI3L = "phi3:3.8b"
    QWEN7L= "qwen2:7b" #local
|
||||
|
||||
class AIAssistant:
    """Routes chat questions to OpenAI, DeepInfra or a local Ollama instance
    depending on the currently selected Model."""

    def __init__(self):
        # Default to the DeepInfra-hosted Qwen 72B instruct model.
        self.model = Model.QWEN72I
        self.openai_client = None
        self.deepinfra_client = None
        self._setup_clients()

    def _setup_clients(self):
        """Create API clients for every provider whose key is in the env.

        Reads OPENAIKEY and DEEPINFRAKEY; a provider without a key simply
        stays unconfigured (its client remains None).
        """
        openaikey = os.getenv("OPENAIKEY")
        if openaikey:
            logger.info(colored("OpenAI key set", "green"))
            # FIX: use an explicit OpenAI client instance instead of mutating
            # the legacy module-global `openai.api_key` while calling the
            # v1-style completions API on the module object.
            self.openai_client = OpenAI(api_key=openaikey)

        deepinfrakey = os.getenv("DEEPINFRAKEY")
        if deepinfrakey:
            logger.info(colored("DEEPINFRAKEY key set", "green"))
            # DeepInfra exposes an OpenAI-compatible endpoint.
            self.deepinfra_client = OpenAI(
                api_key=deepinfrakey,
                base_url="https://api.deepinfra.com/v1/openai",
            )

    def set_model(self, model: Model):
        """Select which Model subsequent ask() calls will use."""
        self.model = model
        logger.info(colored(f"Model set to: {model.value}", "cyan"))

    def ask(self, question: str, category: str = "", name: str = "", log: bool = True) -> str:
        """Append *question* to the stored instruction messages for
        (category, name) and send the conversation to the provider that
        hosts the current model.

        Returns the assistant's reply text.
        """
        logger.info(colored(f"Asking question in category: {category}, name: {name}", "yellow"))
        mm = instructions_get(category=category, name=name)
        mm.add_message(role="user", content=question)
        #mm.print_messages()

        if self.model in [Model.GPT4O, Model.GPT4, Model.GPT35]:
            response = self._ask_openai(mm.messages, log)
        elif self.model in [Model.QWEN72I, Model.MIXTRAL7I]:
            response = self._ask_deepinfra(mm.messages, log)
        else:
            # Every other model tag is assumed to be served by local Ollama.
            response = self._ask_ollama(mm.messages, log)

        logger.info(colored("Ask completed", "green"))
        return response

    def _ask_openai(self, messages, log: bool) -> str:
        """Query an OpenAI-hosted model.

        Raises RuntimeError when OPENAIKEY was never configured.
        """
        if self.openai_client is None:
            # FIX: fail with a clear message instead of AttributeError on None.
            raise RuntimeError("OpenAI client not configured; set OPENAIKEY")
        response = self.openai_client.chat.completions.create(
            model=self.model.value,
            messages=messages,
            max_tokens=300
        )
        r = response.choices[0].message.content
        if log:
            logger.info(colored(f"OpenAI Response: {self.model.value}", "magenta"))
            logger.info(colored(r, "white"))
        return r

    def _ask_ollama(self, messages, log: bool) -> str:
        """Query a locally running Ollama model."""
        response = ollama.chat(model=self.model.value, messages=messages)
        if log:
            logger.info(colored(response['message']['content'], "white"))
        return response['message']['content']

    def _ask_deepinfra(self, messages, log: bool) -> str:
        """Query a DeepInfra-hosted model; logs token usage when *log* is set.

        Raises RuntimeError when DEEPINFRAKEY was never configured.
        """
        if self.deepinfra_client is None:
            raise RuntimeError("DeepInfra client not configured; set DEEPINFRAKEY")
        chat_completion = self.deepinfra_client.chat.completions.create(
            model=self.model.value,
            messages=messages,
            max_tokens=None,
            stream=False
        )
        content = chat_completion.choices[0].message.content
        if log:
            logger.info(colored(f"\nDeepInfra Response: {self.model.value}", "magenta"))
            logger.info(colored("-" * 20, "white"))
            logger.info(colored(content, "white"))
            logger.info(colored("\nToken Usage:", "cyan"))
            logger.info(colored(f"Prompt tokens: {chat_completion.usage.prompt_tokens}", "white"))
            logger.info(colored(f"Completion tokens: {chat_completion.usage.completion_tokens}", "white"))
        return content
|
||||
|
||||
def ai_assistent(reset: bool = True,
                 path: str = "~/code/git.threefold.info/projectmycelium/hero_server/lib/ai/instructions") -> AIAssistant:
    """Build an AIAssistant, optionally resetting and reloading instructions.

    GENERALIZED: the instruction directory used to be hard-coded inside the
    function body; it is now a parameter whose default preserves the old
    behavior.  NOTE: the (misspelled) function name is kept for callers.

    Args:
        reset: when True, wipe the redis instruction cache and reload from disk.
        path:  instruction tree root (``~`` is expanded by instructions_load).
    """
    if reset:
        instructions_reset()
        instructions_load(path)
    return AIAssistant()
|
||||
# Usage example:
if __name__ == "__main__":

    # Wipe the redis-cached instruction sets and reload them from disk so the
    # run always reflects the current files.
    mypath="~/code/git.threefold.info/projectmycelium/hero_server/lib/ai/instructions"
    instructions_reset()
    instructions_load(mypath)

    assistant = AIAssistant()

    #assistant.set_model(Model.MIXTRAL7I) # Or any other model you prefer
    assistant.set_model(Model.QWEN72I)
    #assistant.set_model(Model.PHI3L)

    # response = assistant.ask(
    #     category='timemgmt',
    #     name='schedule',
    #     question='''
    #     lets create a story
    #     we need to paint our church
    #     its long over due, the major complained,
    #     and his mother isn't happy
    #     oh yes I forgot its election time
    #     tom and ben will own this story
    #     its for our church in zanzibar
    #     we need to do it in 4 month from now
    #     our requirements are:
    #     we need to make sure it can withstand sun
    #     color is white
    #     cost below 1000 USD
    #     '''
    # )
    #logger.info(colored("Final Response:", "green"))

    # Ask without a stored instruction set (empty category/name): the prompt
    # text below must stand entirely on its own.
    response = assistant.ask(
        category='',
        name='',
        question='''

based on following names [Isabelle, Kristof, Jan, Rob, Florine, Florian, Sabrina, Tom, Ben]

- find the owners of the story out of the text below, these owners are the ones who will do the task
- see if these names are in the list above
- if names match, return them, if not give error
- return the names as a json list, don't give any other output

------


we need to paint our church

its long over due, the major complained,
and his mother isn't happy

oh yes I forgot its election time

tom and ben will own this story
its for our church in zanzibar

we need to do it in 4 month from now

our requirements are:

we need to make sure it can withstand sun
color is white
cost below 1000 USD

'''
    )

    logger.info(colored(response, "white"))
|
158
_archive/aiprompts/instruction.py
Normal file
158
_archive/aiprompts/instruction.py
Normal file
@ -0,0 +1,158 @@
|
||||
import os
|
||||
import json
|
||||
import redis
|
||||
from typing import List,Dict,Optional
|
||||
|
||||
# Module-level Redis connection used as the cache for instruction message sets.
# NOTE(review): assumes a Redis on localhost:6379/db0 — confirm for deployment.
redis_client = redis.Redis(host='localhost', port=6379, db=0)

#loads instructions from filesystem and stores in redis for further usage
|
||||
class MessageManager:
    """Ordered list of chat messages ({role, content}) for one instruction set,
    identified by (category, name) and cached in redis under
    ``llm:instructions:<category>:<name>``.
    """

    def __init__(self, name = '', category = '', path: str = "", load: bool = True):
        """Create a manager; with a category set, either ingest *path* or pull
        cached messages from redis (when *load* is True).

        An empty category yields an empty, unpersisted manager.
        """
        self.name = name
        self.category = category
        # Conversation in OpenAI chat format: [{"role": ..., "content": ...}].
        self.messages : List[Dict[str, str]] = []
        if self.category=="":
            # No category: nothing to load or persist.
            return
        if path:
            self.add(path)
        else:
            if load:
                self.load()

    def add(self, dir_path: str, filter: Optional[List[str]] = None, save: bool = True):
        """Walk *dir_path* for ``sys_<prio>_<descr>`` / ``user_<prio>_<descr>``
        files, ingest them in ascending priority order, then persist to redis.

        *filter*: optional substrings; only files whose <descr> matches one
        are taken.  Files with an unparsable priority segment are skipped
        with a console warning.
        """
        dir_path = os.path.expanduser(dir_path)
        def process_files(current_dir: str):
            files_to_process = []
            for root, _, files in os.walk(current_dir):
                for file in files:
                    if file.startswith(('sys_', 'user_')):
                        try:
                            # Filename convention: <role>_<priority>_<description>
                            priority = int(file.split('_')[1])
                            descr = '_'.join(file.split('_')[2:])
                            if not filter or any(f in descr for f in filter):
                                files_to_process.append((os.path.join(root, file), priority))
                        except (IndexError, ValueError):
                            print(f"Skipping file with invalid format: {file}")

            # Lower priority number first; the prefix decides the chat role.
            for file_path, _ in sorted(files_to_process, key=lambda x: x[1]):
                file_name = os.path.basename(file_path)
                role = "system" if file_name.startswith('sys_') else "user"
                self.add_file(file_path, role)

        process_files(dir_path)

        if save:
            self.save()

    def add_file(self, file_path, role):
        """Ingest one instruction file as message(s).

        system files become a single system message; user files must contain
        a ``--------`` separator splitting a user prompt from the expected
        assistant answer (a few-shot example pair).  Raises on a missing
        separator or an unknown *role*.
        """
        file_path = os.path.expanduser(file_path)
        with open(file_path, 'r') as file:
            content = file.read().strip()
            if role == "system":
                self.add_message(role, content)
            elif role == "user":
                content_parts = content.split('--------', 1)
                if len(content_parts) == 2:
                    content1, content2 = content_parts[0].strip(), content_parts[1].strip()
                    self.add_message("user", content1)
                    self.add_message("assistant", content2)
                else:
                    raise Exception(f"File {file_path} does not contain the expected separator '--------'")
            else:
                raise Exception("Wrong role")

    def add_message(self, role, content):
        """Append one validated message; raises ValueError on a bad role or
        empty content."""
        if not self.__validate_message(role, content):
            raise ValueError(f"Invalid message format. Role: {role}, Content: {content}")
        self.messages.append({"role": role, "content": content})

    def __validate_message(self, role, content):
        """True when *role* is a known chat role and *content* is a non-blank
        string."""
        valid_roles = ["system", "user", "assistant"]
        return (
            isinstance(role, str) and
            role in valid_roles and
            isinstance(content, str) and
            len(content.strip()) > 0
        )

    def print_messages(self):
        """Pretty-print the conversation to stdout (debug helper)."""
        for message in self.messages:
            role = message["role"].capitalize()
            content = message["content"]
            print(f"\n{role}:\n{'-' * len(role)}")
            print(content)
            print("-" * 40)

    def get_messages(self):
        """Return the (mutable) message list."""
        return self.messages

    def save(self):
        """Serialize the messages to JSON and store them in redis."""
        key = f"llm:instructions:{self.category}:{self.name}"
        value = json.dumps(self.messages)
        redis_client.set(key, value)

    #return true if there where instructions
    def load(self):
        """Replace self.messages with the cached set from redis.

        Returns True when a cached entry existed, False otherwise.
        """
        key = f"llm:instructions:{self.category}:{self.name}"
        value = redis_client.get(key)
        if value:
            self.messages = json.loads(value)
            return True
        return False

    def delete(self):
        """Remove this instruction set from redis; returns the redis DEL count."""
        key = f"llm:instructions:{self.category}:{self.name}"
        return redis_client.delete(key)
|
||||
|
||||
def instructions_reset():
    """Delete every cached instruction set (keys ``llm:instructions*``) from redis."""
    keys_to_delete = list(redis_client.scan_iter(match="llm:instructions*"))
    if keys_to_delete:
        # Single DELETE round-trip instead of one call per key; the guard is
        # required because redis DEL with zero keys is an error.
        redis_client.delete(*keys_to_delete)
|
||||
|
||||
#get message manager and get from redis
|
||||
def instructions_get(name: str, category: str) -> MessageManager:
    """Return the MessageManager for (category, name).

    The constructor pulls any cached messages for that key out of redis.
    """
    manager = MessageManager(name, category)
    return manager
|
||||
|
||||
def instructions_load(path: str) -> List[MessageManager]:
    """Load the instruction tree at *path* into MessageManager objects.

    Layout: ``path/<category>/`` directories.  ``sys_*`` / ``user_*`` files
    directly inside a category directory form shared base messages; each
    subdirectory becomes one named MessageManager seeded with those base
    messages plus its own files (persisted to redis by ``add``).

    Returns the managers created for the subdirectories.
    """
    path = os.path.expanduser(path)
    message_managers = []
    #print(f"load {path}")
    for category_entry in os.listdir(path):
        cat_path = os.path.join(path, category_entry)
        if not os.path.isdir(cat_path):
            continue
        category = os.path.basename(cat_path)
        #print(f"  load category: {cat_path}")
        # Shared messages defined directly in the category directory; they are
        # re-used as the base for every manager of this category.
        category_manager = MessageManager(name="", category=category)
        for entry in os.listdir(cat_path):
            # FIX: loop variable renamed — the original reused `item` and
            # shadowed the outer loop's variable.
            entry_path = os.path.join(cat_path, entry)
            if os.path.isfile(entry_path):
                if entry.startswith('sys_') or entry.startswith('user_'):
                    #print(f"   load cat base: {entry_path}")
                    role = "system" if entry.startswith('sys_') else "user"
                    category_manager.add_file(entry_path, role)
            elif os.path.isdir(entry_path):
                #print(f"   load cat: {entry_path}")
                manager = MessageManager(name=entry, category=category)
                # BUG FIX: copy the base messages instead of aliasing the same
                # list object.  Sharing the list made every manager.add()
                # append into category_manager.messages, leaking each
                # subdirectory's messages into its sibling managers.
                manager.messages = list(category_manager.messages)
                manager.add(entry_path)
                message_managers.append(manager)

    return message_managers
|
||||
|
||||
|
||||
# Usage example:
if __name__ == "__main__":

    # mypath="/Users/despiegk1/code/git.threefold.info/projectmycelium/hero_server/lib/ai/instructions/timemgmt"
    # #mypath=""
    # manager = MessageManager(name="schedule", category="timemgmt",path=mypath)
    # manager.print_messages()

    # Wipe the redis cache and reload every instruction set from disk.
    # NOTE(review): hard-coded absolute path to one developer's machine.
    mypath="/Users/despiegk1/code/git.threefold.info/projectmycelium/hero_server/lib/ai/instructions"
    instructions_reset()
    instructions_load(mypath)
|
||||
|
@ -0,0 +1,10 @@
|
||||
|
||||
if the chat coming from user seems to say translate e.g. translate to french,
|
||||
then translate the text which came to french
|
||||
|
||||
don't do anything for heroscript
|
||||
|
||||
overrule all previous instructions
|
||||
|
||||
just output the translated text
|
||||
|
@ -0,0 +1,42 @@
|
||||
|
||||
how do I use heroscript for agenda management
|
||||
|
||||
-------------------------
|
||||
|
||||
heroscript has basic notations to deal with calendars and events
|
||||
|
||||
when the user asks to translate an calendar action to heroscript use following rules and see example below
|
||||
|
||||
- all dates are in europe style: Format: DD/MM/YYYY e.g. 06/07/2023
|
||||
- if year not specified by user then always use current year which is 2024
|
||||
- if month not specified use current month which is september or month 9
|
||||
- date, title is always required, if attendies or people mentioned they should be on attendies list
|
||||
- don't use comments in the heroscript (means no // at end of line for heroscript)
|
||||
- default duration is 1h, also ok 15m (15 min), 1 day
|
||||
|
||||
```heroscript
|
||||
|
||||
//to add item in agenda
|
||||
!!calendar.add
|
||||
date:'30-10-24'
|
||||
time:'10pm'
|
||||
duration:'1h'
|
||||
title:'meeting with tech team'
|
||||
attendies:'user1, kristof, ...'
|
||||
description:''
|
||||
|
||||
//to delete (can use words cancel, delete)
|
||||
!!calendar.delete
|
||||
id:100
|
||||
|
||||
//to reschedule e.g. delay, 1d stands for 1 day, 1w for 1 week, 1h for 1 hour
|
||||
!!calendar.delay
|
||||
id:100
|
||||
delay:'2d'
|
||||
|
||||
//when e.g. reschedule or delete, we can inform participants
|
||||
!!calendar.inform
|
||||
id:100
|
||||
|
||||
|
||||
```
|
@ -0,0 +1,60 @@
|
||||
|
||||
how do I use heroscript for story and task management
|
||||
|
||||
-------------------------
|
||||
|
||||
heroscript has basic notations to deal with stories and tasks
|
||||
|
||||
when the user asks to translate an story or task action to heroscript use following rules and see example below
|
||||
|
||||
- all dates are in europe style: Format: DD/MM/YYYY e.g. 06/07/2023
|
||||
- if year not specified by user then always use current year which is 2024
|
||||
- if month not specified use current month which is september or month 9
|
||||
- title is always required, if attendies or people mentioned they should be on assignment list
|
||||
- date & time & duration is optional
|
||||
- don't use comments in the heroscript (means no // at end of line for heroscript)
|
||||
- duration expressed as 1m, 1h, 1d (minute, hour, day)
|
||||
- deadline is or a date or +1h, +1d, .. the + means time from now, just list same way e.g. +1h
|
||||
- 1 months is done as 30 days or +30 days, 2 months 60 days, ... (which means +30d for 1 month)
|
||||
- stories cannot have a date, if a date is given, give an error
|
||||
- owners, assignees, contributors, executors is all the same
|
||||
- the description is always in markdown format
|
||||
- the description always has the title repeated
|
||||
- the description has title, purpose, deliverables
|
||||
- try to figure out what purpose and deliverables are
|
||||
- purpose is put as list in markdown
|
||||
|
||||
```heroscript
|
||||
|
||||
//to add a new story
|
||||
!!story.add
|
||||
title:'need to improve UI for version 1.0'
|
||||
owners:'karoline, kristof'
|
||||
description:'
|
||||
# need to improve UI for version 1.0
|
||||
|
||||
We got some complaints from our userbase and its overdue.
|
||||
|
||||
## deliverables
|
||||
|
||||
- [ ] specs and check with kristof
|
||||
- [ ] implement mockup
|
||||
- [ ] implement prototype
|
||||
|
||||
'
|
||||
|
||||
|
||||
//to add a new task, which might (optional) be linked to a story
|
||||
!!task.add
|
||||
title:'let our userbase know'
|
||||
story:10
|
||||
owners:'kristof'
|
||||
deadline:'+10d'
|
||||
description:'
|
||||
write email to userbase
|
||||
ask tom to check
|
||||
'
|
||||
|
||||
|
||||
|
||||
```
|
60
_archive/aiprompts/instructions/timemgmt/sys_2_heroscript.md
Normal file
60
_archive/aiprompts/instructions/timemgmt/sys_2_heroscript.md
Normal file
@ -0,0 +1,60 @@
|
||||
|
||||
'heroscript' is a simple declarative language in following form
|
||||
|
||||
```heroscript
|
||||
!!mother.define
|
||||
myname:'mymama'
|
||||
mylist:'20,200'
|
||||
myint:2
|
||||
|
||||
//this is how we define a child (is in list)
|
||||
!!child.define
|
||||
mother:'mymama'
|
||||
name:'florine'
|
||||
length:100
|
||||
description:'
|
||||
multiline is supported
|
||||
'
|
||||
|
||||
!!child.define
|
||||
mother:'mymama'
|
||||
name:'aurelie'
|
||||
length:60
|
||||
description:'
|
||||
multiline is supported
|
||||
now for aurelie
|
||||
'
|
||||
```
|
||||
|
||||
some rules
|
||||
|
||||
|
||||
- '0,70' is a list of 2 (when comma in example its a list)
|
||||
- never use [] in lists, just have comma separation in between quotes ''
|
||||
- in lists always put lowercase names
|
||||
- node_name:'silver' is same as node_name:silver, when spaces always '' around
|
||||
- // means comment
|
||||
- all dates are in europe style: Format: DD/MM/YYYY e.g. 06/07/2023, always specify year
|
||||
|
||||
the corresponding model in vlang would be
|
||||
|
||||
```vlang
|
||||
pub struct Mother {
|
||||
pub mut:
|
||||
myname string
|
||||
mylist [20,200]
|
||||
myint 2
|
||||
children []Child
|
||||
}
|
||||
|
||||
pub struct Child {
|
||||
pub mut:
|
||||
name string
|
||||
length int
|
||||
description string
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
|
||||
|
@ -0,0 +1,61 @@
|
||||
|
||||
'heroscript' is a simple declarative language in following form
|
||||
|
||||
```heroscript
|
||||
!!mother.define
|
||||
myname:'mymama'
|
||||
mylist:'20,200'
|
||||
myint:2
|
||||
|
||||
//this is how we define a child (is in list)
|
||||
!!child.define
|
||||
mother:'mymama'
|
||||
name:'florine'
|
||||
length:100
|
||||
description:'
|
||||
multiline is supported
|
||||
'
|
||||
|
||||
!!child.define
|
||||
mother:'mymama'
|
||||
name:'aurelie'
|
||||
length:60
|
||||
description:'
|
||||
multiline is supported
|
||||
now for aurelie
|
||||
'
|
||||
```
|
||||
|
||||
some rules
|
||||
|
||||
|
||||
- '0,70' is a list of 2 (when comma in example its a list)
|
||||
- never use [] in lists, just have comma separation in between quotes ''
|
||||
- in lists always put lowercase names
|
||||
- node_name:'silver' is same as node_name:silver, when spaces always '' around
|
||||
- // means comment
|
||||
- all dates are in europe style: Format: DD/MM/YYYY e.g. 06/07/2023, always specify year
|
||||
|
||||
the corresponding model in vlang would be
|
||||
|
||||
```vlang
|
||||
pub struct Mother {
|
||||
pub mut:
|
||||
myname string
|
||||
mylist [20,200]
|
||||
myint 2
|
||||
children []Child
|
||||
}
|
||||
|
||||
pub struct Child {
|
||||
pub mut:
|
||||
name string
|
||||
length int
|
||||
description string
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
In a heroscript file, the second line after the `!!<module>.<name>.define` block is typically used to define the properties or fields of the struct being defined. [1] The properties are specified as <property_name>:<value>, with each property on a new line. For example:
|
||||
|
||||
|
@ -0,0 +1,35 @@
|
||||
|
||||
how can I query a webservice over http using vlang for a simple post request
|
||||
|
||||
|
||||
-------------------
|
||||
|
||||
|
||||
```vlang
|
||||
|
||||
import freeflowuniverse.crystallib.clients.httpconnection
|
||||
import json
|
||||
|
||||
|
||||
mut conn := httpconnection.new(name: 'test', url: 'https://jsonplaceholder.typicode.com/')!
|
||||
|
||||
|
||||
// adding a header field to be used in all requests.
|
||||
// default header have the field Content-Type set to 'application/json',
|
||||
// but we should reconsider this and leave it out, set it manually when needed
|
||||
conn.default_header.add(.content_language, 'Content-Language: en-US')
|
||||
|
||||
// Getting a blog post with id 1 (us example), should be fresh response from the server
|
||||
mut res := conn.send(prefix: 'posts', id: '1')!
|
||||
|
||||
// Result object has minimum fields (code, data) and one method is_ok()
|
||||
println('Status code: ${res.code}')
|
||||
|
||||
// you can check if you got a success status code or not
|
||||
println('Success: ${res.is_ok()}')
|
||||
|
||||
// access the result data
|
||||
println('Data: ${res.data}')
|
||||
|
||||
|
||||
```
|
80
_archive/aiprompts/instructions/vlang/sys_1_vlang.md
Normal file
80
_archive/aiprompts/instructions/vlang/sys_1_vlang.md
Normal file
@ -0,0 +1,80 @@
|
||||
you are chatbot, you try to help everyone with knowledge from v and vlang which is in the attached knowledge base
|
||||
|
||||
ALWAYS FOLLOW THE FOLLOWING INSTRUCTIONS FIRST
|
||||
|
||||
## structs examples
|
||||
|
||||
```v
|
||||
@[heap]
|
||||
pub struct GitAddr {
|
||||
pub mut:
|
||||
gsconfig &GitStructureConfig
|
||||
accounts []&Account
|
||||
provider string
|
||||
account string
|
||||
name string // is the name of the repository
|
||||
branch string
|
||||
nr int
|
||||
}
|
||||
|
||||
pub struct Account {
|
||||
pub mut:
|
||||
name string //my comment
|
||||
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
note usage of pub & pub mut
|
||||
|
||||
all names are lowercase (snakecase with _)
|
||||
|
||||
& is used for references
|
||||
|
||||
## normalize a string
|
||||
|
||||
We call this name fix, anytime we use a name as id, or as a key in a map we want to normalize the string
|
||||
|
||||
```v
|
||||
import freeflowuniverse.crystallib.core.texttools
|
||||
|
||||
mut myname:="a__Name_to_fix"
|
||||
myname = texttools.name_fix(myname)
|
||||
```
|
||||
|
||||
## dealing with paths
|
||||
|
||||
always use this library when dealing with paths; info on how to use it can be found in your knowledgebase in core.pathlib.md
|
||||
|
||||
```v
|
||||
import freeflowuniverse.crystallib.core.pathlib
|
||||
|
||||
#to get a path from a file or dir, the pathlib will figure out if its a dir or file and if it exists
|
||||
mut p:=pathlib.get('/tmp/mysourcefiles')!
|
||||
|
||||
#to get a dir and create it
|
||||
|
||||
|
||||
#to get a list of paths and copy to other destination
|
||||
mut pathlist:=p.list(regex:[r'.*.md$'])! //this gets all files ending on .md
|
||||
pathlist.copy('/tmp/mydest')!
|
||||
|
||||
```
|
||||
|
||||
## executing commands
|
||||
|
||||
```v
|
||||
|
||||
#simple commands, means < 1 line and can be executed using os.execute
|
||||
# fn execute(cmd string) Result see os.md module
|
||||
res := os.execute(cmd)
|
||||
if res.exit_code > 0 {
|
||||
return error('cannot upload over ssh: ${cmd}')
|
||||
}
|
||||
#ALWAYS check the return code
|
||||
```
|
||||
|
||||
#if the command is more complicated use the osal.exec method as can be found in osal.md file
|
||||
|
||||
res := osal.exec(cmd: args.cmd, stdout: args.stdout, debug: executor.debug)!
|
||||
```
|
23
_archive/aiprompts/intent.py
Normal file
23
_archive/aiprompts/intent.py
Normal file
@ -0,0 +1,23 @@
|
||||
from transformers import pipeline
|
||||
|
||||
# Load the pipeline for text classification.
# Zero-shot classification scores arbitrary candidate labels against the input.
classifier = pipeline("zero-shot-classification", model="typeform/distilbert-base-uncased-mnli")

# Define the possible intents
candidate_labels = ["complaint", "feedback", "appointment","travel","agenda","taskmanagement","religion","fire test"]
|
||||
|
||||
def determine_intent(user_input):
    """Return the highest-scoring candidate label for *user_input*.

    Prints the full classifier output (labels + scores) for inspection.
    """
    scores = classifier(user_input, candidate_labels)
    print(scores)
    top_label = scores["labels"][0]  # labels come back sorted by score
    return top_label
|
||||
|
||||
# Example user input
user_input = '''
Playing with matches is dangerous.
Can you book me a meeting, its about flying to paris
'''

# Determine the intent
# NOTE(review): classifies the same input 10 times — presumably to eyeball
# stability of the result; confirm whether a single call suffices.
for i in range(10):
    intent = determine_intent(user_input)
    print(f"User intent: {intent}")
|
133
_archive/aiprompts/tools/chinook.py
Normal file
133
_archive/aiprompts/tools/chinook.py
Normal file
@ -0,0 +1,133 @@
|
||||
import sqlite3
|
||||
|
||||
import json
|
||||
from openai import OpenAI
|
||||
from tenacity import retry, wait_random_exponential, stop_after_attempt
|
||||
from termcolor import colored
|
||||
|
||||
GPT_MODEL = "gpt-4o"
# OpenAI client; picks up OPENAI_API_KEY from the environment.
client = OpenAI()
# NOTE(review): hard-coded local path to the Chinook sample DB — adjust per machine.
dbpath="/Users/despiegk1/Downloads/chinook.db"

conn = sqlite3.connect(dbpath)
print("Opened database successfully")
|
||||
|
||||
def get_table_names(conn):
    """Return a list of table names in the connected SQLite database."""
    cursor = conn.execute("SELECT name FROM sqlite_master WHERE type='table';")
    return [row[0] for row in cursor.fetchall()]
|
||||
|
||||
|
||||
def get_column_names(conn, table_name):
    """Return the column names of *table_name*, in PRAGMA table_info order."""
    rows = conn.execute(f"PRAGMA table_info('{table_name}');").fetchall()
    return [row[1] for row in rows]  # row[1] is the column name
|
||||
|
||||
|
||||
def get_database_info(conn):
    """Return a list of dicts, one per table, with its name and column names."""
    return [
        {"table_name": name, "column_names": get_column_names(conn, name)}
        for name in get_table_names(conn)
    ]
|
||||
|
||||
|
||||
# Render the live schema as "Table: X / Columns: a, b" lines so the model can
# write valid SQL against it.
database_schema_dict = get_database_info(conn)
database_schema_string = "\n".join(
    [
        f"Table: {table['table_name']}\nColumns: {', '.join(table['column_names'])}"
        for table in database_schema_dict
    ]
)

# Function-calling tool spec exposed to the model: a single ask_database
# function whose parameter description embeds the schema above.
tools = [
    {
        "type": "function",
        "function": {
            "name": "ask_database",
            "description": "Use this function to answer user questions about music. Input should be a fully formed SQL query.",
            "parameters": {
                "type": "object",
                "properties": {
                    "query": {
                        "type": "string",
                        "description": f"""
                                SQL query extracting info to answer the user's question.
                                SQL should be written using this database schema:
                                {database_schema_string}
                                The query should be returned in plain text, not in JSON.
                                """,
                    }
                },
                "required": ["query"],
            },
        }
    }
]
|
||||
|
||||
def ask_database(conn, query):
    """Run *query* against the SQLite connection.

    Returns str(rows) on success, or a "query failed with error: ..." string —
    the error is deliberately returned (not raised) so it can be fed back to
    the model as a tool result.
    """
    try:
        rows = conn.execute(query).fetchall()
    except Exception as e:
        return f"query failed with error: {e}"
    return str(rows)
|
||||
|
||||
|
||||
# Step #1: Prompt with content that may result in function call. In this case the model can identify the information requested by the user is potentially available in the database schema passed to the model in Tools description.
messages = [{
    "role":"user",
    "content": "What is the name of the album with the most tracks?"
}]

response = client.chat.completions.create(
    model='gpt-4o',
    messages=messages,
    tools= tools,
    tool_choice="auto"
)

# Append the message to messages list
response_message = response.choices[0].message
messages.append(response_message)

print(response_message)

# Step 2: determine if the response from the model includes a tool call.
tool_calls = response_message.tool_calls
if tool_calls:
    # If true the model will return the name of the tool / function to call and the argument(s)
    tool_call_id = tool_calls[0].id
    tool_function_name = tool_calls[0].function.name
    # SECURITY FIX: function.arguments is a JSON string produced by the model;
    # it was previously passed to eval(), which executes arbitrary code.
    # Parse it as JSON instead.
    tool_query_string = json.loads(tool_calls[0].function.arguments)['query']

    # Step 3: Call the function and retrieve results. Append the results to the messages list.
    if tool_function_name == 'ask_database':
        results = ask_database(conn, tool_query_string)

        messages.append({
            "role":"tool",
            "tool_call_id":tool_call_id,
            "name": tool_function_name,
            "content":results
        })

        # Step 4: Invoke the chat completions API with the function response appended to the messages list
        # Note that messages with role 'tool' must be a response to a preceding message with 'tool_calls'
        model_response_with_function_call = client.chat.completions.create(
            model="gpt-4o",
            messages=messages,
        )  # get a new response from the model where it can see the function response
        print(model_response_with_function_call.choices[0].message.content)
    else:
        print(f"Error: function {tool_function_name} does not exist")
else:
    # Model did not identify a function to call, result can be returned to the user
    print(response_message.content)
|
15
_archive/bart/bart.py
Normal file
15
_archive/bart/bart.py
Normal file
@ -0,0 +1,15 @@
|
||||
import os

import requests

API_URL = "https://api-inference.huggingface.co/models/facebook/bart-large-mnli"
# SECURITY FIX: the Hugging Face API token was hard-coded here (a leaked
# credential).  It is now read from the environment; the leaked token must
# be revoked.  Export HF_API_TOKEN before running.
headers = {"Authorization": f"Bearer {os.environ.get('HF_API_TOKEN', '')}"}

def query(payload):
    """POST *payload* to the HF zero-shot endpoint and return the decoded JSON."""
    response = requests.post(API_URL, headers=headers, json=payload)
    return response.json()

output = query({
    "inputs": "Hi, I recently bought a device from your company but it is not working as advertised and I would like to get reimbursed!",
    "parameters": {"candidate_labels": ["refund", "legal", "faq"]},
})

print(output)
|
15
_archive/fluently_image/fluently.py
Normal file
15
_archive/fluently_image/fluently.py
Normal file
@ -0,0 +1,15 @@
|
||||
import io
import os

import requests
from PIL import Image

API_URL = "https://api-inference.huggingface.co/models/fluently/Fluently-XL-v4"
# SECURITY FIX: the Hugging Face API token was hard-coded here (a leaked
# credential).  It is now read from the environment; the leaked token must
# be revoked.  Export HF_API_TOKEN before running.
headers = {"Authorization": f"Bearer {os.environ.get('HF_API_TOKEN', '')}"}

def query(payload):
    """POST *payload* to the HF image endpoint and return the raw response bytes."""
    response = requests.post(API_URL, headers=headers, json=payload)
    return response.content

image_bytes = query({
    "inputs": "Astronaut riding a horse",
})
# You can access the image with PIL.Image for example
image = Image.open(io.BytesIO(image_bytes))
|
123
_archive/git_poller.py
Normal file
123
_archive/git_poller.py
Normal file
@ -0,0 +1,123 @@
|
||||
import sys
|
||||
import os
|
||||
import redis
|
||||
import subprocess
|
||||
import time
|
||||
|
||||
def find_git_root(path):
    """Walk upward from *path* until a directory containing ``.git`` is found.

    Returns that directory, or None when the filesystem root is reached
    without finding one.
    """
    current = path
    while current != '/':
        if os.path.exists(os.path.join(current, '.git')):
            return current
        current = os.path.dirname(current)
    return None
|
||||
|
||||
def get_git_hash(path):
    """Return the current HEAD commit hash of the git repo at *path*."""
    raw = subprocess.check_output(['git', 'rev-parse', 'HEAD'], cwd=path)
    return raw.decode('utf-8').strip()
|
||||
|
||||
def get_changes(path, old_hash, new_hash):
    """List the file paths touched by the commits in ``old_hash..new_hash``.

    Output of ``git log --name-only`` split on newlines, so it may contain
    empty entries between commits.
    """
    raw = subprocess.check_output(
        ['git', 'log', f'{old_hash}..{new_hash}', '--name-only', '--pretty=format:'],
        cwd=path,
    )
    return raw.decode('utf-8').split('\n')
|
||||
|
||||
def find_heroscript_callers(path, changes, start_path):
    """For each changed file, find the nearest ``.heroscript_caller`` marker.

    Walks upward from the changed file's directory, stopping once the
    directory no longer lies under *start_path*.  Returns the set of marker
    file paths found (empty entries in *changes* are skipped).
    """
    callers = set()
    for change in changes:
        if not change:
            continue
        directory = os.path.dirname(os.path.join(path, change))
        while directory.startswith(start_path):
            marker = os.path.join(directory, '.heroscript_caller')
            if os.path.exists(marker):
                callers.add(marker)
                break  # only the nearest marker counts for this change
            directory = os.path.dirname(directory)
    return callers
|
||||
|
||||
def find_all_heroscript_callers(path):
    """Recursively collect every ``.heroscript_caller`` file below *path*."""
    return {
        os.path.join(root, '.heroscript_caller')
        for root, _dirs, files in os.walk(path)
        if '.heroscript_caller' in files
    }
|
||||
|
||||
def read_heroscript_caller(file_path):
    """Return the non-empty, stripped lines of *file_path*, deduplicated while
    preserving first-seen order (dicts keep insertion order)."""
    seen = {}
    with open(file_path, 'r') as fh:
        for raw in fh:
            stripped = raw.strip()
            if stripped:
                seen[stripped] = None
    return list(seen)
|
||||
|
||||
def main(start_path, reset=False):
    # Poll entry point: pull the repo containing start_path, work out which
    # files changed since the last processed commit, and re-run every affected
    # .heroscript_caller. The last processed commit is tracked per repo root
    # in the Redis hash 'git.lastcommit'.
    if not start_path:
        start_path = os.getcwd()

    git_root = find_git_root(start_path)
    if not git_root:
        print(f"Error: No git repository found in {start_path} or its parent directories.")
        return

    # NOTE(review): assumes a Redis server on localhost:6379 db 0 — confirm deployment.
    r = redis.Redis(host='localhost', port=6379, db=0)

    if reset:
        # Forget the stored commit so every caller is re-run below.
        r.hdel('git.lastcommit', git_root)
        print(f"Reset Redis hash for {git_root}")

    # Perform git pull
    subprocess.run(['git', 'pull'], cwd=git_root, check=True)

    new_hash = get_git_hash(git_root)
    old_hash = r.hget('git.lastcommit', git_root)

    if old_hash:
        old_hash = old_hash.decode('utf-8')  # redis returns bytes
        if old_hash != new_hash:
            # Only re-run callers whose subtree actually changed.
            changes = get_changes(git_root, old_hash, new_hash)
            callers = find_heroscript_callers(git_root, changes, start_path)
        else:
            print("No changes detected.")
            return
    else:
        # First run (or after --reset): process every caller under start_path.
        callers = find_all_heroscript_callers(start_path)

    myerror=False
    for caller in callers:
        unique_lines = read_heroscript_caller(caller)
        for heroscripturl in unique_lines:
            print(f"{heroscripturl}:{new_hash}")
            res0=run_hero_command(heroscripturl)
            if res0==False:
                myerror=True

    # Only advance the stored commit when every caller succeeded, so failed
    # runs are retried on the next invocation.
    if myerror==False:
        r.hset('git.lastcommit', git_root, new_hash)
|
||||
|
||||
def run_hero_command(url: str) -> bool:
    """Run ``hero run -u <url>`` and report success.

    Returns True when the command exits 0, False on any failure; output is
    captured and echoed so failures are diagnosable from the poller log.

    Fixes over the original:
    - the command is passed as an argument list with the default shell=False,
      so the URL is no longer interpolated into a shell string (command
      injection risk);
    - the generic exception handler no longer dereferences ``e.output`` /
      ``e.stderr``, which plain exceptions do not have — that raised
      AttributeError and masked the real error.
    """
    command = ["hero", "run", "-u", url]
    try:
        # Run the command and capture output
        result = subprocess.run(command, check=True,
                                stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                                text=True)
    except subprocess.CalledProcessError as e:
        print(f"Error running command: {e}")
        print(f"Command output: {e.output}")
        print(f"Command stderr: {e.stderr}")
        return False
    except Exception as e:
        # e.g. FileNotFoundError when the 'hero' binary is not installed.
        print(f"An unexpected error occurred: {e}")
        return False

    print("Command Output (stdout):")
    print(result.stdout)

    return True
|
||||
|
||||
if __name__ == "__main__":
    # CLI: script.py <path> [--reset]
    args = sys.argv[1:]
    if len(args) == 2 and args[1] == '--reset':
        main(args[0], reset=True)
    elif len(args) == 1:
        main(args[0])
    else:
        print("Usage: python script.py <path> [--reset]")
        sys.exit(1)
|
199
_archive/lib/context/context.py
Normal file
199
_archive/lib/context/context.py
Normal file
@ -0,0 +1,199 @@
|
||||
"""Context management module for handling file operations and tracking changes."""
|
||||
|
||||
import hashlib
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
|
||||
from herotools.pathtools import remove_file_if_exists
|
||||
from herotools.texttools import name_fix
|
||||
|
||||
|
||||
class MyFile:
    """A file tracked by the context system, with a lazily computed MD5."""

    def __init__(self, path: str):
        """Remember *path* and record whether it currently exists on disk."""
        self.path = path
        self.exists = os.path.exists(self.path)
        # Set by the context when the file turns out to be new or modified.
        self.changed_in_context = False
        self._md5 = ""  # cached hex digest; filled on first md5() call

    def md5(self) -> str:
        """Return the MD5 hex digest of the file contents (cached).

        Raises:
            FileNotFoundError: If the file does not exist.
        """
        if not self.exists:
            raise FileNotFoundError(f"File does not exist: {self.path}")
        if not self._md5:
            with open(self.path, "rb") as fh:
                self._md5 = hashlib.md5(fh.read()).hexdigest()
        return self._md5

    def name(self) -> str:
        """Return the file's base name."""
        return os.path.basename(self.path)

    def ext(self) -> str:
        """Return the file extension, lower-cased (including the dot)."""
        return os.path.splitext(self.path)[1].lower()
|
||||
|
||||
|
||||
class Context:
    """A class for managing file contexts and tracking file changes via MD5.

    Files live under ``$CONTEXTROOT/<name>/files/<cat>/``; each stored file has
    a sibling ``<name>.md5`` marker and a symlink under ``files/md5/<hash>``.
    """

    def __init__(self, name: str = "default", reset: bool = False):
        """Initialize a Context instance.

        Args:
            name: The name of the context (normalized with name_fix)
            reset: Whether to reset (remove) the existing context directory
        """
        # NOTE(review): configuring global logging in a constructor is a side
        # effect on the whole process — kept for compatibility.
        logging.basicConfig(level=logging.DEBUG, format="%(message)s")
        self.logger = logging.getLogger(__name__)
        contextroot = os.getenv("CONTEXTROOT", "~/context")
        self.name = name_fix(name)
        self.path = os.path.join(os.path.expanduser(contextroot), self.name)
        if reset:
            self._remove_context()

    def _remove_context(self):
        """Remove the context directory if it exists (best effort: errors are logged)."""
        if os.path.exists(self.path):
            try:
                shutil.rmtree(self.path)
                self.logger.info(f"Context directory removed: {self.path}")
            except Exception as e:
                self.logger.error(f"Error removing context directory: {e!s}")

    def file_set(self, path: str, cat: str, name: str = "", content: str = "") -> MyFile:
        """Set a file in the context with the given category.

        Args:
            path: Source file path (mutually exclusive with content)
            cat: Category for organizing files
            name: Optional custom name for the file
            content: Optional content to write to file

        Returns:
            MyFile: A MyFile instance representing the file in context

        Raises:
            ValueError: If both path and content are provided, or if a custom
                name's extension does not match the source file's extension
            FileNotFoundError: If the source file does not exist
            RuntimeError: If a stored .md5 marker is not a valid MD5 hex digest
        """
        cat = name_fix(cat)
        name = name_fix(name)

        if content:
            if path:
                raise ValueError("path and content cannot be both set")
            path = os.path.join(self.path, "files", cat, name)
            # BUG FIX: the original opened the file before the category
            # directory existed, raising FileNotFoundError on first use.
            os.makedirs(os.path.dirname(path), exist_ok=True)
            with open(path, "w") as file:
                file.write(content)

        mf = MyFile(path=path)
        if not mf.exists:
            raise FileNotFoundError(f"Source file does not exist: {path}")

        if not content:
            if not name:
                name = name_fix(mf.name())
            else:
                # A custom name must keep the source file's extension.
                if os.path.splitext(name)[1].lower() != mf.ext():
                    name_ext = os.path.splitext(name)[1]
                    raise ValueError(f"Extension {name_ext} must match file extension {mf.ext()}")

        file_path = os.path.join(self.path, "files", cat, name)
        file_path_md5 = os.path.join(self.path, "files", cat, name + ".md5")
        os.makedirs(os.path.dirname(file_path), exist_ok=True)

        # Read the previously stored MD5 marker, if any.
        md5_on_disk = ""
        if os.path.exists(file_path_md5):
            with open(file_path_md5) as file:
                md5_on_disk = file.read().strip()
            # Validate that it's a valid MD5 hash (32 hex chars).
            if len(md5_on_disk) != 32 or not all(c in "0123456789abcdef" for c in md5_on_disk.lower()):
                raise RuntimeError("Bug: hash is not in the right format")

        new_md5 = mf.md5()

        # The file is "changed" when there is no marker yet or the hash moved.
        changed_in_context = not md5_on_disk or new_md5 != md5_on_disk

        md5_dir = os.path.join(self.path, "files", "md5")

        if changed_in_context:
            old_name = os.path.basename(path)
            new_name = os.path.basename(file_path)
            self.logger.debug(f"File changed in context {self.name}: {old_name} -> {new_name}")
            if mf.path != file_path:
                shutil.copy2(mf.path, file_path)
            with open(file_path_md5, "w") as file:
                file.write(new_md5)
            # Remove the old MD5 link if it exists
            if md5_on_disk:
                old_md5_link = os.path.join(md5_dir, md5_on_disk)
                remove_file_if_exists(old_md5_link)

        mf.path = file_path
        mf.changed_in_context = changed_in_context

        # Maintain a content-addressed symlink files/md5/<hash> -> stored file.
        os.makedirs(md5_dir, exist_ok=True)
        md5_link = os.path.join(md5_dir, mf.md5())
        if not os.path.exists(md5_link):
            os.symlink(os.path.relpath(file_path, md5_dir), md5_link)

        return mf

    def file_get(self, name: str, cat: str, needtoexist: bool = True) -> MyFile:
        """Get a file from the context with the given category.

        Args:
            name: Name of the file to retrieve (normalized)
            cat: Category the file is stored under (normalized)
            needtoexist: Whether to raise an error if file doesn't exist

        Returns:
            MyFile: A MyFile instance representing the requested file

        Raises:
            FileNotFoundError: If needtoexist is True and file doesn't exist
        """
        name = name_fix(name)
        cat = name_fix(cat)
        file_path = os.path.join(self.path, "files", cat, name)
        if needtoexist and not os.path.exists(file_path):
            self.logger.warning(f"File not found: {file_path}")
            raise FileNotFoundError(f"Context file does not exist: {file_path}")
        return MyFile(file_path)
|
155
_archive/lib/dagu/client.py
Normal file
155
_archive/lib/dagu/client.py
Normal file
@ -0,0 +1,155 @@
|
||||
import os
|
||||
import requests
|
||||
from requests.auth import HTTPBasicAuth
|
||||
from dataclasses import dataclass, field
|
||||
from typing import List, Optional
|
||||
from datetime import datetime
|
||||
import time
|
||||
|
||||
@dataclass
class DAGStatus:
    """Status snapshot of a single DAG as reported by the DAGu REST API.

    Field names mirror the API's JSON keys (hence camelCase).
    """

    name: str
    status: str
    group: Optional[str] = None
    schedule: Optional[str] = None
    lastRun: Optional[str] = None
    nextRun: Optional[str] = None
    pid: Optional[int] = None
    log: Optional[str] = None
    requestId: Optional[str] = None
    params: Optional[str] = None
    startedAt: Optional[str] = None
    finishedAt: Optional[str] = None
    suspended: Optional[bool] = None

    def get_last_run_epoch(self) -> Optional[int]:
        """Convert lastRun to epoch time (None when lastRun is unset)."""
        return self._convert_to_epoch(self.lastRun)

    def get_next_run_epoch(self) -> Optional[int]:
        """Convert nextRun to epoch time (None when nextRun is unset)."""
        return self._convert_to_epoch(self.nextRun)

    @staticmethod
    def _convert_to_epoch(timestamp: Optional[str]) -> Optional[int]:
        """Convert an ISO 8601 timestamp (optionally 'Z'-suffixed) to Unix epoch seconds.

        Bug fix: the original used ``time.mktime(dt.timetuple())``, which drops
        the timezone and interprets the UTC-aware datetime as *local* time,
        skewing the result by the machine's UTC offset. ``datetime.timestamp()``
        honours the offset.
        """
        if timestamp:
            dt = datetime.fromisoformat(timestamp.replace('Z', '+00:00'))
            return int(dt.timestamp())
        return None
|
||||
|
||||
class DAGuClient:
    """Thin client for the DAGu REST API, using basic auth from the environment."""

    def __init__(self, base_url: str = "http://localhost:8888"):
        """Store the server base URL and resolve credentials immediately."""
        self.base_url = base_url
        self.auth = self._get_basic_auth()

    def _get_basic_auth(self) -> HTTPBasicAuth:
        """Retrieve the Basic Auth credentials from environment variables.

        Raises:
            EnvironmentError: if DAGU_BASICAUTH_USERNAME or
                DAGU_BASICAUTH_PASSWORD is missing/empty.
        """
        username = os.getenv('DAGU_BASICAUTH_USERNAME')
        password = os.getenv('DAGU_BASICAUTH_PASSWORD')

        if not username or not password:
            raise EnvironmentError("Please set the DAGU_BASICAUTH_USERNAME and DAGU_BASICAUTH_PASSWORD environment variables.")

        return HTTPBasicAuth(username, password)

    def list_dags(self) -> List[DAGStatus]:
        """Fetch the list of DAGs with their statuses; returns [] on any request error."""
        try:
            response = requests.get(f"{self.base_url}/api/v1/dags", auth=self.auth)
            response.raise_for_status()  # Raises an HTTPError for bad responses (4xx or 5xx)
            dags_data = response.json().get('DAGs', [])

            if isinstance(dags_data, list):
                return [self._parse_dag(dag) for dag in dags_data]
            print(f"Unexpected response format: {dags_data}")
            return []
        except requests.exceptions.RequestException as e:
            print(f"Error during request: {e}")
            return []

    def _parse_dag(self, dag_entry: dict) -> DAGStatus:
        """Parse one DAG JSON entry into a DAGStatus; returns None on malformed data."""
        try:
            dag_data = dag_entry.get("DAG", {})
            status_data = dag_entry.get("Status", {})

            return DAGStatus(
                name=dag_data.get("Name"),
                status=status_data.get("StatusText"),
                group=dag_data.get("Group"),
                schedule=(dag_data.get("Schedule", [{}])[0].get("Expression")
                          if dag_data.get("Schedule") else None),
                lastRun=status_data.get("FinishedAt"),
                nextRun=None,  # Adjust as needed based on your API's response format
                pid=status_data.get("Pid"),
                log=status_data.get("Log"),
                requestId=status_data.get("RequestId"),
                params=status_data.get("Params"),
                startedAt=status_data.get("StartedAt"),
                finishedAt=status_data.get("FinishedAt"),
                suspended=dag_entry.get("Suspended")
            )
        except AttributeError as e:
            print(f"Error parsing DAG data: {dag_entry}, Error: {e}")
            return None

    def submit_dag_action(self, name: str, action: str, request_id: Optional[str] = None, params: Optional[str] = None) -> dict:
        """Submit an action to a specified DAG.

        Args:
            name (str): Name of the DAG.
            action (str): Action to be performed ('start', 'stop', or 'retry').
            request_id (Optional[str]): Required if action is 'retry'.
            params (Optional[str]): Parameters for the DAG execution.

        Returns:
            dict: Response from the API ({} on error).
        """
        url = f"{self.base_url}/api/v1/dags/{name}"
        payload = {
            "action": action,
            **({"request-id": request_id} if request_id else {}),
            **({"params": params} if params else {}),
        }

        response = None
        try:
            response = requests.post(url, json=payload, auth=self.auth)
            response.raise_for_status()  # Raises an HTTPError for bad responses (4xx or 5xx)
            return response.json()
        except requests.exceptions.RequestException as e:
            # Bug fix: the original referenced `response` unconditionally here,
            # but when requests.post itself raises (connection error), the name
            # is unbound and the handler crashed with NameError.
            print(f"Error during request: {e}")
            if response is not None:
                print(f"Response content: {response.content}")
            return {}
|
||||
|
||||
# Example usage
if __name__ == "__main__":
    # Smoke-test against a locally running DAGu server; requires the
    # DAGU_BASICAUTH_* environment variables to be set.
    client = DAGuClient()

    # List DAGs
    try:
        dags = client.list_dags()
        for dag in dags:
            if dag:  # _parse_dag returns None for malformed entries
                print(f"DAG Name: {dag.name}, Status: {dag.status}, Group: {dag.group}, "
                      f"Schedule: {dag.schedule}, Last Run: {dag.lastRun}, "
                      f"Next Run: {dag.nextRun}, PID: {dag.pid}, Log: {dag.log}, "
                      f"Request ID: {dag.requestId}, Params: {dag.params}, "
                      f"Started At: {dag.startedAt}, Finished At: {dag.finishedAt}, "
                      f"Suspended: {dag.suspended}")
                # Example of using helper methods to get epoch times
                if dag.get_last_run_epoch():
                    print(f"Last Run Epoch: {dag.get_last_run_epoch()}")
                if dag.get_next_run_epoch():
                    print(f"Next Run Epoch: {dag.get_next_run_epoch()}")
    except Exception as e:
        print(f"Error: {e}")

    # Submit an action to a DAG (example: start a DAG)
    try:
        dag_name = "test11"  # Replace with your actual DAG name
        action_response = client.submit_dag_action(name=dag_name, action="start")
        print(f"Action Response: {action_response}")
    except Exception as e:
        print(f"Error: {e}")
|
184
_archive/lib/dagu/dag.py
Normal file
184
_archive/lib/dagu/dag.py
Normal file
@ -0,0 +1,184 @@
|
||||
import os
|
||||
import yaml
|
||||
from dataclasses import dataclass, field
|
||||
from typing import List, Dict, Optional
|
||||
from server import *
|
||||
|
||||
@dataclass
class EnvVariable:
    """A single key/value environment entry for a DAG definition.

    NOTE(review): appears unused — DAG.env is a plain Dict[str, str] that is
    expanded inline in DAG.to_dict(); confirm before removing.
    """

    key: str
    value: str
|
||||
|
||||
@dataclass
class HandlerCommand:
    """Wraps a shell command used as a DAG lifecycle handler."""

    command: str
|
||||
|
||||
@dataclass
class Handlers:
    """Optional lifecycle hook commands run on DAG success/failure/cancel/exit."""

    success: Optional[HandlerCommand] = None
    failure: Optional[HandlerCommand] = None
    cancel: Optional[HandlerCommand] = None
    exit: Optional[HandlerCommand] = None
|
||||
|
||||
@dataclass
class RepeatPolicy:
    """Controls whether a step re-runs and how many seconds to wait between runs."""

    repeat: bool
    intervalSec: int
|
||||
|
||||
@dataclass
class Precondition:
    """A condition expression with its expected value; gates DAG execution."""

    condition: str
    expected: str
|
||||
|
||||
@dataclass
class Step:
    """A single executable step inside a DAG."""

    name: str
    command: str
    script: Optional[str] = None
    depends: List[str] = field(default_factory=list)
    description: Optional[str] = None
    repeatPolicy: Optional[RepeatPolicy] = None
|
||||
|
||||
@dataclass
class DAG:
    """A DAGu DAG definition, serializable to the YAML schema DAGu expects.

    Field names (including the camelCase and the capitalised
    MaxCleanUpTimeSec) mirror the DAGu YAML keys emitted by to_dict().
    """

    name: str
    description: Optional[str] = None
    schedule: Optional[str] = None
    group: Optional[str] = None
    tags: Optional[str] = None  # This should be a single string
    env: Dict[str, str] = field(default_factory=dict)
    logDir: Optional[str] = None
    restartWaitSec: Optional[int] = None
    histRetentionDays: Optional[int] = None
    delaySec: Optional[int] = None
    maxActiveRuns: Optional[int] = None
    params: Optional[List[str]] = field(default_factory=list)
    preconditions: List[Precondition] = field(default_factory=list)
    mailOn: Dict[str, bool] = field(default_factory=dict)
    handlerOn: Handlers = field(default_factory=Handlers)
    MaxCleanUpTimeSec: Optional[int] = None
    steps: List[Step] = field(default_factory=list)

    def add_step(self, step: Step):
        """Add a step to the DAG."""
        self.steps.append(step)

    def to_dict(self) -> Dict:
        """Build the YAML-ready dict; optional fields are emitted only when truthy.

        NOTE: truthiness-based inclusion means explicit zeros (e.g. delaySec=0)
        and empty containers are dropped from the output.
        """
        return {
            "name": self.name,
            **({"description": self.description} if self.description else {}),
            **({"schedule": self.schedule} if self.schedule else {}),
            **({"group": self.group} if self.group else {}),
            **({"tags": self.tags} if self.tags else {}),
            # env dict is flattened to DAGu's list-of-{key,value} form.
            **({"env": [{"key": k, "value": v} for k, v in self.env.items()]} if self.env else {}),
            **({"logDir": self.logDir} if self.logDir else {}),
            **({"restartWaitSec": self.restartWaitSec} if self.restartWaitSec else {}),
            **({"histRetentionDays": self.histRetentionDays} if self.histRetentionDays else {}),
            **({"delaySec": self.delaySec} if self.delaySec else {}),
            **({"maxActiveRuns": self.maxActiveRuns} if self.maxActiveRuns else {}),
            # params list is serialized as one space-joined string.
            **({"params": " ".join(self.params)} if self.params else {}),
            **({"preconditions": [{"condition": pc.condition, "expected": pc.expected} for pc in self.preconditions]} if self.preconditions else {}),
            **({"mailOn": self.mailOn} if self.mailOn else {}),
            **({"MaxCleanUpTimeSec": self.MaxCleanUpTimeSec} if self.MaxCleanUpTimeSec else {}),
            # handlerOn is emitted only when at least one handler is set;
            # unset handlers serialize as explicit nulls.
            **({"handlerOn": {
                "success": {"command": self.handlerOn.success.command} if self.handlerOn.success else None,
                "failure": {"command": self.handlerOn.failure.command} if self.handlerOn.failure else None,
                "cancel": {"command": self.handlerOn.cancel.command} if self.handlerOn.cancel else None,
                "exit": {"command": self.handlerOn.exit.command} if self.handlerOn.exit else None,
            }} if any(vars(self.handlerOn).values()) else {}),
            "steps": [
                {
                    "name": step.name,
                    "command": step.command,
                    **({"script": step.script} if step.script else {}),
                    **({"depends": step.depends} if step.depends else {}),  # Change this back to depends_on if needed
                    **({"description": step.description} if step.description else {}),
                    **({"repeatPolicy": {
                        "repeat": step.repeatPolicy.repeat,
                        "intervalSec": step.repeatPolicy.intervalSec
                    }} if step.repeatPolicy else {}),
                } for step in self.steps
            ],
        }

    def to_yaml(self) -> str:
        """Render the DAG as YAML, preserving key insertion order."""
        return yaml.dump(self.to_dict(), sort_keys=False)
|
||||
|
||||
def new(**kwargs) -> DAG:
    """Factory helper: build a DAG from keyword arguments."""
    dag_obj = DAG(**kwargs)
    return dag_obj
|
||||
|
||||
# Example usage to create a new DAG
if __name__ == "__main__":
    # End-to-end demo: list existing DAGs, build one in memory, persist it via
    # the Server (which also asks DAGu to start it), then list again.
    # Initialize the server with the default DAG directory
    server = Server()

    # List existing DAGs
    print("Listing existing DAGs:")
    dags = server.list_dags()
    for dag_name in dags:
        print(f" - {dag_name}")

    # Create a new DAG
    dag = new(
        name="example_dag",
        description="Example DAG to demonstrate functionality",
        schedule="0 * * * *",
        group="ExampleGroup",
        tags="example",  # Convert tags to a comma-separated string
        env={
            "LOG_DIR": "${HOME}/logs",
            "PATH": "/usr/local/bin:${PATH}"
        },
        logDir="${LOG_DIR}",
        restartWaitSec=60,
        histRetentionDays=3,
        delaySec=1,
        maxActiveRuns=1,
        params=["param1", "param2"],
        preconditions=[
            Precondition(condition="`echo $2`", expected="param2")
        ],
        mailOn={"failure": True, "success": True},
        MaxCleanUpTimeSec=300,
        handlerOn=Handlers(
            success=HandlerCommand(command="echo succeed"),  # Convert to map structure
            failure=HandlerCommand(command="echo failed"),  # Convert to map structure
            cancel=HandlerCommand(command="echo canceled"),  # Convert to map structure
            exit=HandlerCommand(command="echo finished")  # Convert to map structure
        )
    )

    # Add steps to the DAG
    dag.add_step(Step(
        name="pull_data",
        command="sh",
        script="echo `date '+%Y-%m-%d'`",
    ))

    dag.add_step(Step(
        name="cleanse_data",
        command="echo cleansing ${DATA_DIR}/${DATE}.csv",
        depends=["pull_data"]  # Ensure this is the correct key
    ))

    dag.add_step(Step(
        name="transform_data",
        command="echo transforming ${DATA_DIR}/${DATE}_clean.csv",
        depends=["cleanse_data"]  # Ensure this is the correct key
    ))

    dag.add_step(Step(
        name="A task",
        command="main.sh",
        repeatPolicy=RepeatPolicy(repeat=True, intervalSec=60)
    ))

    # Save the new DAG as a YAML file
    server.create_dag(dag)
    print(f"DAG '{dag.name}' created and saved and started.")

    # List DAGs again to see the newly created one
    print("\nListing updated DAGs:")
    dags = server.list_dags()
    for dag_name in dags:
        print(f" - {dag_name}")
|
51
_archive/lib/dagu/server.py
Normal file
51
_archive/lib/dagu/server.py
Normal file
@ -0,0 +1,51 @@
|
||||
import os
|
||||
import yaml
|
||||
import glob
|
||||
from typing import List
|
||||
from dag import DAG
|
||||
from client import *
|
||||
|
||||
# Assuming the following classes have already been defined:
|
||||
# - DAG (for creating and managing DAG structures)
|
||||
# - Step
|
||||
# - Handlers
|
||||
# - RepeatPolicy
|
||||
# - Precondition
|
||||
|
||||
class Server:
    """Manages DAG YAML definitions in a local DAGu dags directory."""

    def __init__(self, dag_dir: str = "~/hero/var/dagu/dags/"):
        """Expand and create the dags directory."""
        self.dag_dir = os.path.expanduser(dag_dir)
        os.makedirs(self.dag_dir, exist_ok=True)  # Ensure the directory exists

    def list_dags(self) -> List[str]:
        """Lists the DAG names (YAML files without extension) in the directory."""
        dag_files = glob.glob(os.path.join(self.dag_dir, "*.yaml"))
        return [os.path.splitext(os.path.basename(dag_file))[0] for dag_file in dag_files]

    def delete_dag(self, name: str) -> bool:
        """Deletes a DAG file based on its name; returns False when absent."""
        dag_file = os.path.join(self.dag_dir, f"{name}.yaml")
        if os.path.exists(dag_file):
            os.remove(dag_file)
            return True
        print(f"DAG '{name}' does not exist.")
        return False

    def create_dag(self, dag: DAG, start: bool = True) -> bool:
        """Serialize *dag* to <name>.yaml and optionally ask DAGu to start it."""
        dag_file = os.path.join(self.dag_dir, f"{dag.name}.yaml")
        with open(dag_file, 'w') as file:
            yaml.dump(dag.to_dict(), file, sort_keys=False)
        if start:
            self.start_dag(dag.name)
        return True

    def start_dag(self, dag_name: str) -> bool:
        """Submit a 'start' action for *dag_name* to the DAGu server.

        Bug fix: the method was annotated ``-> bool`` but fell off the end and
        returned None; it now returns True once the action was submitted.
        """
        client = DAGuClient()
        client.submit_dag_action(name=dag_name, action="start")
        return True

    def stop_dag(self, dag_name: str) -> bool:
        """Submit a 'stop' action for *dag_name* (same None-return fix as start_dag)."""
        client = DAGuClient()
        client.submit_dag_action(name=dag_name, action="stop")
        return True
|
||||
|
13
_archive/lib/web/mdcollections/__init__.py
Normal file
13
_archive/lib/web/mdcollections/__init__.py
Normal file
@ -0,0 +1,13 @@
|
||||
from .base_types import MDItem, MDPage, MDImage, MDCollection
|
||||
from .mdcollections import MDCollections
|
||||
from .scanner import scan_directory
|
||||
|
||||
# Re-export all public types and functions
|
||||
__all__ = [
|
||||
'MDItem',
|
||||
'MDPage',
|
||||
'MDImage',
|
||||
'MDCollection',
|
||||
'MDCollections',
|
||||
'scan_directory'
|
||||
]
|
177
_archive/lib/web/mdcollections/base_types.py
Normal file
177
_archive/lib/web/mdcollections/base_types.py
Normal file
@ -0,0 +1,177 @@
|
||||
from pathlib import Path
|
||||
from typing import List, Dict
|
||||
from dataclasses import dataclass
|
||||
from .tools import name_fix
|
||||
|
||||
import os
|
||||
|
||||
class MDItem:
    """Base class for items (pages, images) stored in an MDCollection."""

    def __init__(self, collection: "MDCollection", rel_path: Path):
        """Bind the item to its collection and collection-relative path.

        Raises:
            TypeError: if rel_path is not a pathlib.Path.
        """
        if not isinstance(rel_path, Path):
            raise TypeError("rel_path must be a Path instance")
        self.collection = collection
        self.rel_path = rel_path  # path relative to the collection root
        self.content_ = ""  # lazily populated content cache (see MDPage)
        # BUG FIX: the original assigned the *type* here (`self.processed = bool`),
        # which is always truthy; the intent is an unprocessed-by-default flag.
        self.processed = False

    def __str__(self) -> str:
        return f"{self.__class__.__name__}: {self.rel_path}"

    @property
    def full_path(self) -> Path:
        """The collection root joined with the item's relative path."""
        return self.collection.path / self.rel_path

    @property
    def path(self) -> str:
        """The resolved filesystem path as a string."""
        return str(self.full_path.resolve())

    @property
    def name(self) -> str:
        """The normalized (name_fix'ed) base name of the item, extension kept."""
        return name_fix(os.path.basename(self.rel_path))
|
||||
|
||||
class MDPage(MDItem):
    """A markdown file in the collection; content is read lazily and cached.

    (Cleanup: removed a stray dead `pass` statement that preceded the body.)
    """

    @property
    def content(self) -> str:
        """Return the file's text, reading it on first access and caching it
        in ``content_``.

        Raises:
            FileNotFoundError: if the file does not exist on disk.
            Exception: wrapping any OSError raised while reading.
        """
        if not self.content_:
            if os.path.exists(self.path):
                try:
                    with open(self.path, 'r', encoding='utf-8') as f:
                        self.content_ = f.read()
                except OSError as e:
                    raise Exception(f"Error reading file {self.path}: {e}")
            else:
                raise FileNotFoundError(f"Cannot find markdown file: {self.path}")
        return self.content_
|
||||
|
||||
|
||||
|
||||
class MDImage(MDItem):
    """An image file stored in the collection; no behaviour beyond MDItem."""
|
||||
|
||||
|
||||
|
||||
@dataclass
class MDCollection:
    """Represents a collection of markdown files and images.

    Attributes: path is the collection root directory, name its identifier,
    items the scanned MDPage/MDImage entries.
    """

    path: Path
    name: str
    items: List[MDItem]

    def page_get(self, name: str) -> MDPage:
        """
        Get a markdown page by name.

        Args:
            name: Name of the page to find (will be normalized)

        Returns:
            MDPage object

        Raises:
            ValueError: If page not found, or if name contains '__'
                (reserved as the collection/page separator in generated links)
        """
        # Remove .md extension if present
        if "__" in name:
            raise ValueError("there should be no __ in name of page_get")

        if name.endswith('.md'):
            name=name[:-3]
        normalized_name = name_fix(name)
        # Pages match on their normalized stem (extension ignored).
        for item in self.items:
            if isinstance(item, MDPage):
                item_name = name_fix(item.rel_path.stem)
                if item_name == normalized_name:
                    return item
        raise ValueError(f"Page not found: {name}")

    def image_get(self, name: str) -> MDImage:
        """
        Get an image by name.

        Args:
            name: Name of the image to find (will be normalized)

        Returns:
            MDImage object

        Raises:
            ValueError: If image not found
        """
        normalized_name = name_fix(name)
        for item in self.items:
            if isinstance(item, MDImage):
                # For images, compare with extension
                item_name = name_fix(os.path.basename(item.rel_path))
                if item_name == normalized_name:
                    return item
        raise ValueError(f"Image not found: {name}")

    def __str__(self) -> str:
        """Returns a tree-like string representation of the collection."""
        result = [f"Collection: {self.name} ({self.path})"]

        # Group items by type
        pages = [item for item in self.items if isinstance(item, MDPage)]
        images = [item for item in self.items if isinstance(item, MDImage)]

        # Add pages
        if pages:
            result.append(" Pages:")
            for page in sorted(pages, key=lambda x: str(x.rel_path)):
                result.append(f" └─ {page.name}")

        # Add images
        if images:
            result.append(" Images:")
            for image in sorted(images, key=lambda x: str(x.rel_path)):
                result.append(f" └─ {image.name}")

        return "\n".join(result)

    def index_page(self) -> MDPage:
        """Generate a dynamic index of all markdown files in the collection.

        Returns an in-memory MDPage named "index.md" (never written to disk);
        its content_ cache is pre-filled with the generated markdown.
        """
        # Get all markdown pages and sort them by relative path
        pages = sorted(
            [item for item in self.items if isinstance(item, MDPage)],
            key=lambda x: str(x.rel_path)
        )

        # Group pages by directory
        page_groups: Dict[str, List[MDPage]] = {}
        for page in pages:
            dir_path = str(page.rel_path.parent)
            if dir_path == '.':
                dir_path = 'Root'
            if dir_path not in page_groups:
                page_groups[dir_path] = []
            page_groups[dir_path].append(page)

        # Generate markdown content
        content = ["# Collection Index\n"]

        for dir_path in sorted(page_groups.keys()):
            # Add directory header
            if dir_path != 'Root':
                content.append(f"\n## {dir_path}\n")
            elif len(page_groups) > 1:  # Only show Root header if there are other directories
                content.append("\n## Root Directory\n")

            # Add pages in current directory
            for page in sorted(page_groups[dir_path], key=lambda x: x.name):
                # Create display name by removing extension and formatting
                display_name = page.rel_path.stem.replace('_', ' ').replace('-', ' ').title()
                # Create link using relative path; links follow the
                # '<collection>__<relpath>' convention rejected by page_get input.
                link_path = str(page.rel_path)
                content.append(f'- [{display_name}]({self.name}__{link_path})')

        mdp=MDPage(self,Path("index.md"))
        mdp.content_ = "\n".join(content)
        return mdp
|
25
_archive/lib/web/mdcollections/factory.py
Normal file
25
_archive/lib/web/mdcollections/factory.py
Normal file
@ -0,0 +1,25 @@
|
||||
import os
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
from .mdcollections import MDCollections
|
||||
|
||||
def create_collections(path: Optional[str] = None) -> MDCollections:
    """
    Factory function to create and initialize an MDCollections instance.

    Args:
        path: Path to scan for collections. Must be provided: despite the
            Optional annotation there is no fallback location — None is
            rejected. '~' is expanded to the home directory.

    Returns:
        Initialized MDCollections instance

    Raises:
        ValueError: If path is None
    """
    if path is None:
        raise ValueError("Path cannot be None")

    # Expand ~ to home directory if present in path
    expanded_path = os.path.expanduser(path)
    return MDCollections(root_path=Path(expanded_path))
|
||||
|
112
_archive/lib/web/mdcollections/mdcollections.py
Normal file
112
_archive/lib/web/mdcollections/mdcollections.py
Normal file
@ -0,0 +1,112 @@
|
||||
from pathlib import Path
|
||||
from typing import List, Optional
|
||||
from .base_types import MDCollection, MDPage, MDImage, MDItem
|
||||
from .scanner import scan_directory
|
||||
from .tools import name_fix
|
||||
|
||||
class MDCollections:
    """Manages multiple markdown collections.

    Scans the immediate subdirectories of ``root_path``; each subdirectory
    that yields items from ``scan_directory`` becomes one MDCollection.
    """
    def __init__(self, root_path: Path):
        """
        Initialize collections manager.

        Args:
            root_path: Root directory containing collections

        Raises:
            ValueError: If root_path does not exist (raised while scanning)
        """
        self.root_path = root_path
        # Discovered collections; kept sorted by name after scanning.
        self.collections: List[MDCollection] = []
        self._scan_collections()

    def _scan_collections(self):
        """Scan root directory for collections."""
        if not self.root_path.exists():
            raise ValueError(f"Root path does not exist: {self.root_path}")

        # Scan immediate subdirectories only
        for path in sorted(self.root_path.iterdir()):
            if path.is_dir():
                # Skip directories starting with _ or containing 'archive' in lowercase
                if path.name.startswith('_') or 'archive' in path.name.lower():
                    continue

                items = scan_directory(path)
                if items:  # Only create collection if directory contains markdown files
                    collection = MDCollection(
                        path=path,
                        name=path.name,
                        items=sorted(items, key=lambda x: x.name)
                    )
                    self.collections.append(collection)

        # Sort collections by name
        self.collections.sort(key=lambda x: x.name)

    def collection_get(self, name: str) -> MDCollection:
        """
        Get a collection by name.

        Args:
            name: Name of the collection to find

        Returns:
            MDCollection object

        Raises:
            ValueError: If collection not found
        """
        # Linear scan; collection counts are small (one per subdirectory).
        for collection in self.collections:
            if collection.name == name:
                return collection
        raise ValueError(f"Collection not found: {name}")

    def page_get(self, collection_name: str, page_name: str) -> MDPage:
        """
        Get a page from a specific collection.

        Args:
            collection_name: Name of the collection
            page_name: Name of the page

        Returns:
            MDPage object

        Raises:
            ValueError: If collection or page not found
        """
        # Both names are normalized (lowercased, spaces -> underscores) so
        # lookups are insensitive to formatting differences in links.
        page_name=name_fix(page_name)
        collection_name=name_fix(collection_name)

        collection = self.collection_get(collection_name)
        return collection.page_get(page_name)

    def image_get(self, collection_name: str, image_name: str) -> MDImage:
        """
        Get an image from a specific collection.

        Args:
            collection_name: Name of the collection
            image_name: Name of the image

        Returns:
            MDImage object

        Raises:
            ValueError: If collection or image not found
        """
        # Handle image name that might contain collection prefix
        # NOTE(review): this assigns the part BEFORE "__" to image_name and
        # the part AFTER to collection_name — the opposite of the
        # "collection__item" order used elsewhere in this package; confirm
        # the intended orientation with callers before relying on it.
        if "__" in image_name:
            image_name, collection_name = image_name.split("__", 1)

        image_name = name_fix(image_name)
        collection_name = name_fix(collection_name)

        collection = self.collection_get(collection_name)
        print(f" -- image get: '{collection_name}' '{image_name}'")
        return collection.image_get(image_name)

    def __str__(self) -> str:
        """Returns a string representation of all collections."""
        if not self.collections:
            return "No collections found"

        return "\n\n".join(str(collection) for collection in self.collections)
|
61
_archive/lib/web/mdcollections/scanner.py
Normal file
61
_archive/lib/web/mdcollections/scanner.py
Normal file
@ -0,0 +1,61 @@
|
||||
from pathlib import Path
|
||||
from typing import List, Sequence
|
||||
from .base_types import MDItem, MDPage, MDImage, MDCollection
|
||||
|
||||
def scan_directory(path: Path) -> Sequence[MDItem]:
    """
    Scan a directory for markdown files and images.

    Markdown pages are collected first, then image files; anything inside a
    hidden directory (a path component starting with '.') is ignored.

    Args:
        path: Directory to scan

    Returns:
        List of MDItem objects (MDPage or MDImage)

    Raises:
        ValueError: If path does not exist or is not a directory
    """
    if not path.exists():
        raise ValueError(f"Path does not exist: {path}")
    if not path.is_dir():
        raise ValueError(f"Path is not a directory: {path}")

    # Placeholder collection: items need an owning collection at construction
    # time, so we create one up front and attach the items afterwards.
    owner = MDCollection(
        path=path,
        name=path.name,
        items=[]
    )

    def _visible(candidate: Path) -> bool:
        # Reject anything with a hidden ('.'-prefixed) path component.
        return not any(part.startswith('.') for part in candidate.parts)

    # Markdown pages first, mirroring the original scan order.
    found: List[MDItem] = [
        MDPage(owner, md_file.relative_to(path))
        for md_file in path.rglob("*.md")
        if _visible(md_file)
    ]

    # Then every file whose suffix marks it as an image.
    image_suffixes = {'.png', '.jpg', '.jpeg', '.gif', '.svg'}
    found.extend(
        MDImage(owner, candidate.relative_to(path))
        for candidate in path.rglob("*")
        if _visible(candidate) and candidate.suffix.lower() in image_suffixes
    )

    # Attach the discovered items to the placeholder collection.
    owner.items = found

    return found
|
99
_archive/lib/web/mdcollections/tools.py
Normal file
99
_archive/lib/web/mdcollections/tools.py
Normal file
@ -0,0 +1,99 @@
|
||||
from pathlib import Path
|
||||
from typing import Union
|
||||
import os
|
||||
import re
|
||||
|
||||
def should_skip_path(path: Union[str, Path]) -> bool:
    """
    Decide whether a path should be excluded from processing.

    A path is skipped when its final component (basename) begins with a
    '.' or a '_' — hidden and private entries respectively.

    Args:
        path: Path to check (can be file or directory)

    Returns:
        True if path should be skipped, False otherwise
    """
    return Path(path).name.startswith(('.', '_'))
|
||||
|
||||
|
||||
def strip_ansi_codes(text):
    """Return *text* with all ANSI terminal escape sequences removed.

    Handles both two-character escapes (ESC + single char) and CSI
    sequences (ESC [ ... final-byte).
    """
    pattern = re.compile(r'\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])')
    return pattern.sub('', text)
|
||||
|
||||
|
||||
|
||||
def name_fix(path: str) -> str:
    """
    Normalize a bare filename (no directory component):
    - strips ANSI codes and surrounding whitespace
    - lowercases, converts spaces and hyphens to underscores, drops commas
    - collapses any run of underscores to a single one
    - removes a '.md' extension (pages are addressed without it); image
      extensions are preserved

    Args:
        path: Filename to normalize (must not contain '/')

    Returns:
        Normalized filename

    Raises:
        TypeError: If path is not a string
        ValueError: If path contains a forward slash
    """
    if not isinstance(path, str):
        raise TypeError("Input must be a string")

    if '/' in path:
        raise ValueError("Path should not contain forward slashes - use for filenames only")

    path = strip_ansi_codes(path).strip()
    name, ext = os.path.splitext(path)

    # Pages lose their '.md' extension; images keep theirs.
    if not is_image(path) and ext.lower() == '.md':
        ext = ""

    # Convert to lowercase and replace spaces and other characters
    name = name.lower().replace(' ', '_').replace('-', '_').replace(',', '')

    # BUG FIX: the old single replace('__', '_') left '__' behind for runs of
    # three or more underscores (e.g. 'a - b' -> 'a___b' -> 'a__b'). '__' is
    # the reserved collection/page delimiter elsewhere in this package, so no
    # run may survive — collapse them all in one pass.
    name = re.sub(r'_+', '_', name)

    # Only strip trailing underscores for image files
    if is_image(name):
        name = name.rstrip('_')

    return f"{name}{ext}"
|
||||
|
||||
|
||||
def path_fix(path: Union[str, Path]) -> Path:
    """
    Normalize only the filename component of *path*.

    The directory structure is preserved untouched; the final component is
    run through :func:`name_fix` (lowercased, spaces to underscores, etc.).

    Args:
        path: Path to normalize

    Returns:
        Path with normalized filename but unchanged parent directories
    """
    # Coerce anything unexpected to a string first, then to a Path.
    if not isinstance(path, (str, Path)):
        path = str(path)
    candidate = Path(path)

    # Normalize only the basename and reattach it to the original parent.
    return candidate.parent / name_fix(candidate.name)
|
||||
|
||||
|
||||
def is_image(basename):
    """Return True if *basename* carries a recognised image file extension.

    The comparison is case-insensitive and tolerates stray whitespace
    around the extension.
    """
    # Extensions treated as images throughout this package.
    image_suffixes = {'.jpg', '.jpeg', '.png', '.gif', '.svg'}

    _, extension = os.path.splitext(basename)
    return extension.strip().lower() in image_suffixes
|
||||
|
9
_archive/lib/web/mdserver/__init__.py
Normal file
9
_archive/lib/web/mdserver/__init__.py
Normal file
@ -0,0 +1,9 @@
|
||||
"""
|
||||
MDServer package initialization.
|
||||
This helps Python properly resolve the package imports.
|
||||
"""
|
||||
from .markdown_server import MDServer
|
||||
from .factory import serve_markdown
|
||||
from .process_markdown import process_markdown
|
||||
|
||||
__all__ = ['MDServer', 'serve_markdown', 'process_markdown']
|
19
_archive/lib/web/mdserver/factory.py
Normal file
19
_archive/lib/web/mdserver/factory.py
Normal file
@ -0,0 +1,19 @@
|
||||
from typing import Optional, Union
|
||||
from pathlib import Path
|
||||
|
||||
import sys
|
||||
sys.path.append(str(Path(__file__).parent.parent))
|
||||
|
||||
from .markdown_server import MDServer # Import directly from the module file
|
||||
|
||||
|
||||
def serve_markdown(collections_path: str) -> None:
    """
    Legacy entry point kept for backward compatibility.

    Builds an MDServer for the given collections directory and immediately
    serves the markdown content through it.

    Args:
        collections_path: Path to the collections directory. Can be a string or Path object.
    """
    MDServer(collections_path=collections_path).serve_markdown()
|
55
_archive/lib/web/mdserver/macro_chart.py
Normal file
55
_archive/lib/web/mdserver/macro_chart.py
Normal file
@ -0,0 +1,55 @@
|
||||
import re
|
||||
from typing import TYPE_CHECKING
|
||||
from mdcollections.base_types import MDPage
|
||||
|
||||
def js_to_python(js_str):
    """Convert JavaScript object notation to Python dictionary syntax.

    Applies an ordered series of textual transforms: strips the 'option ='
    assignment wrapper, quotes bare keys, converts string quoting, removes
    trailing commas and comments, and maps JS literals to Python ones.
    """
    # Drop any leading 'option =' assignment and a trailing semicolon.
    source = re.sub(r'^option\s*=\s*', '', js_str)
    source = re.sub(r';(\s*)$', '', source)

    # Quote bare property names so they become valid dict keys.
    source = re.sub(r'(\b\w+):', r'"\1":', source)

    # Convert single-quoted strings to double quotes, protecting escaped
    # single quotes behind a placeholder during the swap.
    placeholder = "___ESCAPED_QUOTE___"
    source = source.replace("\\'", placeholder)
    source = source.replace("'", '"')
    source = source.replace(placeholder, "\\'")

    # Remove trailing commas before a closing bracket or brace.
    source = re.sub(r',(\s*[}\]])', r'\1', source)

    # Map JavaScript literals onto their Python equivalents.
    source = source.replace('true', 'True').replace('false', 'False').replace('null', 'None')

    # Strip // line comments and /* ... */ block comments.
    source = re.sub(r'//.*?\n|/\*.*?\*/', '', source, flags=re.DOTALL)

    return source.strip()
|
||||
|
||||
def process_markdown_echarts(page: MDPage) -> MDPage:
    """Convert ```echarts blocks to ```py sl blocks that use st_echarts.

    Each fenced ``echarts`` block is translated from JS object notation to
    Python (via js_to_python) and wrapped in a streamlit code block that the
    ``py sl`` processor will execute later. Mutates and returns *page*.

    Raises:
        TypeError: If page is not an MDPage.
    """
    if not isinstance(page, MDPage):
        raise TypeError("page must be a MDPage")

    def replace_echarts_block(match):
        # Group 1 is the body between the ```echarts fences.
        echarts_code = match.group(1).strip()
        python_code = js_to_python(echarts_code)

        # Create the streamlit code block
        streamlit_code = f"""```py sl
from streamlit_echarts import st_echarts
option = {python_code}
st_echarts(options=option, height="400px")
```"""
        return streamlit_code

    # Process all echarts code blocks (DOTALL so bodies may span lines).
    processed_content = re.sub(r"```echarts\n(.*?)\n```", replace_echarts_block, page.content, flags=re.DOTALL)

    page.content_ = processed_content

    return page
|
119
_archive/lib/web/mdserver/macro_links.py
Normal file
119
_archive/lib/web/mdserver/macro_links.py
Normal file
@ -0,0 +1,119 @@
|
||||
import os
|
||||
import re
|
||||
import streamlit as st
|
||||
from PIL import Image # Pillow package provides PIL
|
||||
from typing import Optional, List, Tuple, TYPE_CHECKING
|
||||
from mdcollections.tools import name_fix, is_image
|
||||
from mdcollections.base_types import MDPage
|
||||
from mdcollections.mdcollections import MDCollections
|
||||
from .process_images import process_image
|
||||
from .tools import debug
|
||||
|
||||
def link_process(link: str, page: MDPage, collections: MDCollections, is_image_link: bool, debug_enabled: bool = False) -> str:
    """Process a markdown link target and verify it exists in a collection.

    Normalizes the target to the ``collection__item`` form (defaulting to the
    current page's collection when none is given), verifies the item exists,
    and returns either the rewritten link target or a red HTML error span.

    Args:
        link: Raw link target from the markdown source.
        page: Page the link appears on (supplies the default collection).
        collections: Collections manager used for existence checks.
        is_image_link: True when the link came from an image (``![...]``).
        debug_enabled: Unused here; debug output goes through debug().

    Returns:
        For images: "collection__item" on success. For pages:
        "?page=collection__item.md". On failure: an HTML error span.

    Raises:
        TypeError: If any argument has the wrong type.
        RuntimeError: If the link contains more than one '__' delimiter.
    """
    if not isinstance(link, str):
        raise TypeError("link must be strings")

    if not isinstance(collections, MDCollections):
        raise TypeError("collection must be MDCollection")

    if not isinstance(page, MDPage):
        raise TypeError("page must be MDPage")

    debug(f"\nProcessing link: {link}")
    debug(f"Is image link: {is_image_link}")

    # Remove './' if present
    if link.startswith("./"):
        link = link[2:]
        debug("Removed './' prefix from link")

    # Get just the filename without directories
    link = os.path.basename(link)
    debug(f"Extracted basename: {link}")

    # 'collection:item' is accepted as an alias for 'collection__item'.
    if not '__' in link:
        if ":" in link:
            link = link.replace(':', '__')

    # Links without an explicit collection default to the current page's one.
    if not "__" in link:
        link = f"{page.collection.name}__{link}"
        debug(f"Created full link: {link}")

    if link.count("__")>1:
        # BUG FIX: the message previously read "${link}" — a stray '$' left
        # over from JS-template style; the f-string already interpolates.
        raise RuntimeError(f"cannot have 2x __ in {link}")

    collection_name, item_name = link.split('__', 1)

    # Convert to lowercase and replace spaces with underscores
    item_name = name_fix(item_name)
    collection_name = name_fix(collection_name)
    debug(f"Normalized: '{collection_name}__{item_name}'")

    if is_image_link:
        try:
            md_i = collections.image_get(collection_name=collection_name,image_name=item_name)
            debug("Successfully verified image exists")
            # process_image(md_i)
            # return ""
            return f"{collection_name}__{item_name}"
        except ValueError:
            debug(f"Error - image not found: {link}")
            return f'<span style="color: red;">ERROR: Image not found: {link}</span>'
    else:
        if not item_name.endswith('.md'):
            item_name = f"{item_name}.md"
            debug(f"Added .md extension: {item_name}")
        try:
            collections.page_get(collection_name, item_name)
            debug("Successfully verified page exists")
        except ValueError:
            debug(f"Error - page not found: {link}")
            return f'<span style="color: red;">ERROR: Page not found: {link}</span>'

    # NOTE(review): item_name already ends with '.md' here, so this produces
    # a double '.md.md' suffix; name_fix() strips one '.md' on lookup, which
    # appears to compensate — confirm before changing.
    return f"?page={collection_name}__{item_name}.md"
|
||||
|
||||
def process_links(page: MDPage, collections: MDCollections) -> MDPage:
    """Process links in the markdown content.

    Rewrites every markdown link/image target via link_process, forwarding
    any error spans verbatim. Mutates and returns *page*.

    Raises:
        TypeError: If page or collections has the wrong type.
    """
    if not isinstance(page, MDPage):
        raise TypeError("page must be a MDPage")
    if not isinstance(collections, MDCollections):
        raise TypeError("collections must be a MDCollections")

    debug(f"Processing links for page: {page.name}")
    debug(f"Content length before processing: {len(page.content)} characters")

    # Matches markdown links and images: optional '!' + [text](target).
    link_pattern = r'(!?)\[(.*?)\]\((.*?)\)'

    def replace_link(match):
        is_image_link = match.group(1) == '!'
        link_text = match.group(2)
        link_path = match.group(3)

        debug(f"Found link - Text: {link_text}, Path: {link_path}")
        debug(f"Is image link: {is_image_link}")

        processed_link = link_process(link_path, page, collections, is_image_link)

        if "ERROR:" in processed_link:
            debug(f"Link processing error: {processed_link}")
            return processed_link  # this forwards the error, is html in red

        if is_image_link:
            # NOTE(review): this returns an empty string, silently dropping
            # verified image links from the rendered output — it looks like an
            # HTML <img ...> template was lost from this source; confirm the
            # intended replacement before relying on this behavior.
            debug(f"Returning processed image link: ")
            return f''
        else:
            debug(f"Returning processed text link: [{link_text}]({processed_link})")
            return f'[{link_text}]({processed_link})'

    page.content_ = re.sub(link_pattern, replace_link, page.content)

    debug(f"Content length after processing: {len(page.content)} characters")
    debug("Link processing complete")

    return page
|
29
_archive/lib/web/mdserver/macro_mermaid.py
Normal file
29
_archive/lib/web/mdserver/macro_mermaid.py
Normal file
@ -0,0 +1,29 @@
|
||||
import re
|
||||
from typing import TYPE_CHECKING
|
||||
from mdcollections.base_types import MDPage
|
||||
|
||||
|
||||
def process_markdown_mermaid(page: MDPage) -> MDPage:
    """Convert ```mermaid blocks to ```py sl blocks that use st_mermaid.

    Each fenced ``mermaid`` block is embedded (with double quotes escaped)
    into a streamlit code block that the ``py sl`` processor will execute
    later. Mutates and returns *page*.

    Raises:
        TypeError: If page is not an MDPage.
    """
    if not isinstance(page, MDPage):
        raise TypeError("page must be a MDPage")

    def replace_mermaid_block(match):
        # Group 1 is the body between the ```mermaid fences.
        mermaid_code = match.group(1).strip()

        # Create the streamlit code block
        # Note: The mermaid code needs to be properly escaped as a string
        mermaid_code = mermaid_code.replace('"', '\\"')  # Escape double quotes
        streamlit_code = f'''```py sl
from streamlit_mermaid import st_mermaid
st_mermaid("""
{mermaid_code}
""")
```'''
        return streamlit_code

    # Process all mermaid code blocks (DOTALL so bodies may span lines).
    processed_content = re.sub(r"```mermaid\n(.*?)\n```", replace_mermaid_block, page.content, flags=re.DOTALL)
    page.content_ = processed_content

    return page
|
69
_archive/lib/web/mdserver/macro_sl.py
Normal file
69
_archive/lib/web/mdserver/macro_sl.py
Normal file
@ -0,0 +1,69 @@
|
||||
import re
|
||||
import streamlit as st
|
||||
import pandas as pd
|
||||
import numpy as np
|
||||
from io import StringIO
|
||||
import sys
|
||||
from typing import TYPE_CHECKING
|
||||
from mdcollections.base_types import MDPage
|
||||
|
||||
# if TYPE_CHECKING:
|
||||
# from .markdown_server import MDServer
|
||||
|
||||
def execute_streamlit_code(code_block):
    """
    Execute a streamlit code block and capture its output.
    The code block should be valid Python code that uses streamlit.

    Returns:
        A (success, output) tuple: (True, captured stdout) on success, or
        (False, error message including the failed code) on any exception.

    SECURITY NOTE: this exec()s code taken straight from markdown content
    with no sandboxing — only serve markdown from trusted sources.
    """
    # Create string buffer to capture any print outputs
    old_stdout = sys.stdout
    redirected_output = StringIO()
    sys.stdout = redirected_output

    try:
        # Execute the code block
        # The code block can use st, pd, np which are already imported
        exec(code_block, {
            'st': st,
            'pd': pd,
            'np': np
        })

        # Get any printed output
        printed_output = redirected_output.getvalue()
        return True, printed_output
    except Exception as e:
        # Broad catch is deliberate: arbitrary user code may raise anything,
        # and the caller renders the failure instead of crashing the app.
        return False, f"Error: {str(e)}\n\nFailed code:\n{code_block}"
    finally:
        # Restore stdout
        sys.stdout = old_stdout
|
||||
|
||||
|
||||
def process_streamlit_blocks(page: MDPage) -> MDPage:
    """
    Find and process ```py sl code blocks in markdown content.
    Returns the modified content with executed streamlit code blocks replaced by their output.

    Successful blocks render via streamlit side effects and are removed from
    the markdown; failed blocks are replaced by their error message in a
    plain code fence. Mutates and returns *page*.

    Raises:
        TypeError: If page is not an MDPage.
    """
    if not isinstance(page, MDPage):
        raise TypeError("page must be a MDPage")
    # if not hasattr(md_server, 'collections_manager'):
    #     raise TypeError("md_server must be an instance of MDServer")

    def replace_code_block(match):
        code = match.group(1).strip()
        success, result = execute_streamlit_code(code)

        if not success:
            # If execution failed, return the error message
            return f"```\n{result}\n```"

        # If successful, return empty string - the streamlit components
        # will be rendered but the code block itself won't be shown
        return ""

    # Process the code block (DOTALL so bodies may span lines).
    processed_content = re.sub(r"```py\s+sl\n(.*?)\n```", replace_code_block, page.content, flags=re.DOTALL)

    page.content_ = processed_content

    return page
|
76
_archive/lib/web/mdserver/macro_slides.py
Normal file
76
_archive/lib/web/mdserver/macro_slides.py
Normal file
@ -0,0 +1,76 @@
|
||||
import re
|
||||
import streamlit as st
|
||||
from PIL import Image
|
||||
from typing import TYPE_CHECKING, List
|
||||
from mdcollections.base_types import MDPage, MDImage
|
||||
|
||||
# if TYPE_CHECKING:
|
||||
# from .markdown_server import MDServer
|
||||
|
||||
def create_slider_component(images: List[str]) -> None:
    """Create a Streamlit component for image slides.

    Renders prev/next navigation, the current image, and a slide counter.
    The current position lives in st.session_state.current_slide; images are
    resolved through st.session_state.md_server's collections manager.

    Args:
        images: Image specs, looked up via the collections manager.
    """
    st.markdown("""
    <style>
    .stImage {
        cursor: pointer;
    }
    </style>
    """, unsafe_allow_html=True)

    # Initialize session state
    if 'current_slide' not in st.session_state:
        st.session_state.current_slide = 0

    # Navigation buttons; modulo wraps around at both ends.
    col1, col2, col3 = st.columns([1, 4, 1])

    with col1:
        if st.button("⬅️ Previous"):
            st.session_state.current_slide = (st.session_state.current_slide - 1) % len(images)

    with col3:
        if st.button("Next ➡️"):
            st.session_state.current_slide = (st.session_state.current_slide + 1) % len(images)

    # Display current image
    current_image_spec = images[st.session_state.current_slide]
    if not hasattr(st.session_state, 'md_server') or not st.session_state.md_server.collections_manager:
        st.error("Collections manager not initialized")
        return

    try:
        # NOTE(review): image_get is called with a single positional arg,
        # which binds to collection_name — confirm this matches image_get's
        # expected argument order.
        image_item = st.session_state.md_server.collections_manager.image_get(current_image_spec)
        image = Image.open(image_item.path)
        st.image(image, use_column_width=True)
    except Exception as e:
        # Render the failure inline rather than breaking the whole page.
        st.error(f"Could not load image: {current_image_spec}. Error: {str(e)}")

    # Display slide counter
    st.caption(f"Slide {st.session_state.current_slide + 1} of {len(images)}")
|
||||
|
||||
def process_markdown_slides(page: MDPage) -> MDPage:
    """Convert ```slides blocks to ```py sl blocks that use the slider component.

    Each non-empty line inside a ``slides`` fence is treated as one image
    spec; the whole block is replaced by a ``py sl`` block that calls
    create_slider_component with that list. Mutates and returns *page*.

    Raises:
        TypeError: If page is not an MDPage.
    """
    if not isinstance(page, MDPage):
        raise TypeError("page must be a MDPage")
    # if not hasattr(md_server, 'collections_manager'):
    #     raise TypeError("md_server must be an instance of MDServer")

    # # Store md_server in session state for use by create_slider_component
    # st.session_state.md_server = md_server

    def replace_slides_block(match):
        slides_content = match.group(1).strip()
        # One image spec per non-blank line.
        image_paths = [line.strip() for line in slides_content.split('\n') if line.strip()]

        # Create the streamlit code block
        image_paths_str = repr(image_paths)
        # NOTE(review): the generated block uses a *relative* import
        # ('from .macro_slides import ...'), but the exec() in macro_sl.py
        # runs it without package context, so this import likely fails at
        # render time — confirm and consider an absolute import.
        streamlit_code = f'''```py sl
from .macro_slides import create_slider_component
create_slider_component({image_paths_str})
```'''
        return streamlit_code

    # Process all slides code blocks (DOTALL so bodies may span lines).
    page.content_ = re.sub(r"```slides\n(.*?)\n```", replace_slides_block, page.content, flags=re.DOTALL)

    return page
|
237
_archive/lib/web/mdserver/markdown_server.py
Normal file
237
_archive/lib/web/mdserver/markdown_server.py
Normal file
@ -0,0 +1,237 @@
|
||||
from typing import Optional, Union
|
||||
import os
|
||||
from pathlib import Path
|
||||
import traceback
|
||||
import sys
|
||||
import re
|
||||
import pudb
|
||||
try:
|
||||
import streamlit as st
|
||||
except ImportError:
|
||||
raise ImportError("streamlit is required. Install with: pip install streamlit")
|
||||
|
||||
from mdcollections.base_types import MDPage, MDImage, MDCollection
|
||||
from mdcollections.mdcollections import MDCollections
|
||||
from .process_markdown import process_markdown, summary_load
|
||||
from .tools import debug
|
||||
|
||||
def setup_static_dir(collections_path: str) -> None:
    """
    Set up static directory for serving images.
    Creates symbolic links from collections to static directory.

    NOTE: currently a no-op stub — the symlink logic below is commented out
    and only kept as a sketch of the intended implementation.
    """
    pass
    # static_dir = os.path.join(collections_path, "static")
    # if not os.path.exists(static_dir):
    #     os.makedirs(static_dir)

    # Create symlinks for each collection
    # collections = os.listdir(collections_path)
    # for collection in collections:
    #     collection_path = os.path.join(collections_path, collection)
    #     if os.path.isdir(collection_path) and not collection.startswith('.') and collection != 'static':
    #         # Create symlink from collection to static/collection
    #         static_link = os.path.join(static_dir, collection)
    #         if not os.path.exists(static_link):
    #             try:
    #                 os.symlink(collection_path, static_link)
    #             except OSError as e:
    #                 debug(f"Failed to create symlink from {collection_path} to {static_link}: {e}")
|
||||
|
||||
def process_markdown_content(content: str, base_path: str, collection_name: str) -> None:
    """
    Process and display markdown content.

    Args:
        content: The markdown content to process
        base_path: Base path for resolving relative paths
            (currently unused — kept for interface compatibility)
        collection_name: Name of the collection
            (currently unused — kept for interface compatibility)
    """
    st.markdown(content)
|
||||
|
||||
class MDServer:
|
||||
def __init__(self,collections_path:str):
|
||||
"""Initialize the MDServer instance."""
|
||||
# Convert path to string if it's a Path object
|
||||
if not isinstance(collections_path, str):
|
||||
return RuntimeError("collections_path must be a string.")
|
||||
|
||||
st.session_state.setdefault('current_collection', None)
|
||||
st.session_state.setdefault('current_page', None)
|
||||
st.session_state.setdefault('show_collections_view', False)
|
||||
st.session_state.setdefault('collections_manager', None)
|
||||
st.session_state.setdefault('debug_mode', True)
|
||||
|
||||
# Get the collections manager
|
||||
collections_path = os.path.expanduser(collections_path)
|
||||
|
||||
print(f"Initializing collections manager for: {collections_path}")
|
||||
|
||||
collections_manager = MDCollections(root_path=Path(collections_path))
|
||||
|
||||
# Set up static directory for serving images
|
||||
setup_static_dir(collections_path)
|
||||
|
||||
# Set up page config
|
||||
st.set_page_config(
|
||||
page_title="Markdown Server",
|
||||
page_icon="📚",
|
||||
layout="wide",
|
||||
initial_sidebar_state="expanded",
|
||||
)
|
||||
|
||||
st.session_state.collections_manager = collections_manager
|
||||
|
||||
@property
|
||||
def collections_manager(self) -> MDCollections:
|
||||
"""
|
||||
Property to safely access the collections manager.
|
||||
Ensures collections_manager is initialized before access.
|
||||
|
||||
Returns:
|
||||
MDCollections: The initialized collections manager
|
||||
|
||||
Raises:
|
||||
RuntimeError: If collections_manager is not initialized
|
||||
"""
|
||||
if not st.session_state.get('collections_manager'):
|
||||
raise RuntimeError("Collections manager not initialized. Please ensure MDServer is properly initialized.")
|
||||
return st.session_state.collections_manager
|
||||
|
||||
@property
|
||||
def collections(self) -> list:
|
||||
"""
|
||||
Property to safely access collections from the collections manager.
|
||||
|
||||
Returns:
|
||||
list: List of available collections
|
||||
|
||||
Raises:
|
||||
RuntimeError: If collections_manager is not initialized
|
||||
"""
|
||||
return self.collections_manager.collections
|
||||
|
||||
def handle_url_parameters(self) -> None:
|
||||
"""
|
||||
Handle URL parameters to load specific pages.
|
||||
Expected format: ?page=collection_name__page_name.md
|
||||
Example: ?page=banking_whitepaper__web_3_vision.md
|
||||
"""
|
||||
query_params = st.query_params
|
||||
requested_page = query_params.get('page', None)
|
||||
|
||||
if not requested_page:
|
||||
return
|
||||
|
||||
try:
|
||||
# Split the page parameter using '__' as delimiter
|
||||
if '__' not in requested_page:
|
||||
raise ValueError(f"Invalid page format. Expected format: collection_name__page_name.md, got: {requested_page}")
|
||||
|
||||
collection_name, page_name = requested_page.split('__', 1)
|
||||
|
||||
# Get the page using collections_manager's page_get method
|
||||
page = self.collections_manager.page_get(
|
||||
collection_name=collection_name,
|
||||
page_name=page_name
|
||||
)
|
||||
|
||||
page = process_markdown(page, collections=self.collections_manager)
|
||||
|
||||
st.session_state.current_collection = page.collection
|
||||
st.session_state.current_page = page
|
||||
|
||||
except ValueError as e:
|
||||
# Handle invalid format or page not found errors
|
||||
st.warning(f"Could not load page: {requested_page}. Error: {str(e)}")
|
||||
|
||||
def setup_sidebar(self, collections: MDCollections) -> None:
|
||||
"""
|
||||
Set up the sidebar with collection selection.
|
||||
|
||||
Args:
|
||||
collections: List of available collections
|
||||
"""
|
||||
with st.sidebar:
|
||||
# Add Debug Mode toggle that persists across reloads
|
||||
debug_mode = st.toggle("Debug Mode", st.session_state.debug_mode)
|
||||
if debug_mode != st.session_state.debug_mode:
|
||||
st.session_state.debug_mode = debug_mode
|
||||
# Store in local storage to persist across reloads
|
||||
st.session_state['debug_mode'] = debug_mode
|
||||
|
||||
# Add Collections View action
|
||||
if st.button("View All Collections"):
|
||||
st.session_state.show_collections_view = True
|
||||
st.session_state.current_page = None
|
||||
return
|
||||
|
||||
collection_names = [c.name for c in self.collections]
|
||||
current_idx = collection_names.index(st.session_state.current_collection.name) if st.session_state.current_collection else 0
|
||||
|
||||
selected_collection_name = st.selectbox(
|
||||
"Choose a collection:",
|
||||
collection_names,
|
||||
index=current_idx,
|
||||
key="collection_selector"
|
||||
)
|
||||
|
||||
# Add sidebar content
|
||||
with st.sidebar:
|
||||
# Check for summary.md
|
||||
collection = self.collections_manager.collection_get(selected_collection_name)
|
||||
summary_page = summary_load(collection)
|
||||
st.markdown(summary_page.content, unsafe_allow_html=True)
|
||||
|
||||
# Get the selected collection by name
|
||||
st.session_state.current_collection = self.collections_manager.collection_get(selected_collection_name)
|
||||
|
||||
def display_content(self) -> None:
    """Display the markdown content in the main area.

    Renders, in priority order:
      1. the collections-overview template when show_collections_view is set,
      2. the currently selected page,
      3. the current collection's index page,
      4. otherwise a "select a collection" warning.
    """
    main_content = st.container()

    with main_content:
        try:
            if st.session_state.show_collections_view:
                # Read and process collections view template
                collections_view_path = Path(__file__).parent / "pages" / "collections_view.md"
                with open(collections_view_path, 'r') as f:
                    template = f.read()

                # Replace placeholder with actual collections string representation
                content = template.replace("{collections_str}", str(self.collections_manager))
                st.markdown(content)

            elif st.session_state.current_page:
                st.markdown(st.session_state.current_page.content, unsafe_allow_html=True)

            elif st.session_state.current_collection:
                # Display collection summary and index when no specific page is selected
                st.markdown("### Collection Index")
                myindex_page = st.session_state.current_collection.index_page()
                myindex_page = process_markdown(myindex_page, collections=self.collections_manager)
                st.markdown(myindex_page.content)
            else:
                st.warning("Please select a collection.")
        except Exception as e:
            # Broad catch keeps the page alive on any rendering error; the
            # error is surfaced to the user instead of crashing the app.
            st.error(f"An error occurred: {str(e)}")
||||
def serve_markdown(self) -> None:
    """
    Serve markdown content using Streamlit.

    Top-level page flow: validate that collections exist, apply URL
    parameters, build the sidebar, then render the main content area.
    """
    try:
        if not self.collections:
            st.error("No collections found.")
            return

        # Handle URL parameters
        self.handle_url_parameters()

        # Setup sidebar
        # NOTE(review): setup_sidebar currently ignores its argument and
        # reads self.collections / self.collections_manager directly.
        self.setup_sidebar(self.collections_manager)

        # Display content
        self.display_content()
    except Exception as e:
        # Top-level boundary: show the error in the UI rather than crash.
        st.error(f"An error occurred: {str(e)}")
7
_archive/lib/web/mdserver/pages/collections_view.md
Normal file
7
_archive/lib/web/mdserver/pages/collections_view.md
Normal file
@ -0,0 +1,7 @@
|
||||
# Collections Overview
|
||||
|
||||
```python
|
||||
{collections_str}
|
||||
```
|
||||
|
||||
The tree structure above is automatically generated from the current state of the collections manager.
|
89
_archive/lib/web/mdserver/process_images.py
Normal file
89
_archive/lib/web/mdserver/process_images.py
Normal file
@ -0,0 +1,89 @@
|
||||
import os
|
||||
import re
|
||||
import streamlit as st
|
||||
from PIL import Image # Pillow package provides PIL
|
||||
from typing import Optional, List, Tuple, TYPE_CHECKING
|
||||
from mdcollections.base_types import MDImage, MDPage
|
||||
from mdcollections.mdcollections import MDCollections
|
||||
from .tools import debug
|
||||
|
||||
|
||||
def process_image(myimage: MDImage, alt_text: Optional[str] = None) -> str:
    """
    Process an image and return an HTML img tag for rendering in markdown.

    Args:
        myimage: The MDImage object to process
        alt_text: Optional alternative text for the image

    Returns:
        str: HTML img tag with proper styling, or a human-readable error
        string when the image cannot be opened.

    Raises:
        TypeError: if myimage is not an MDImage.
    """
    import html  # stdlib; local import keeps module-level deps unchanged

    if not isinstance(myimage, MDImage):
        raise TypeError("myimage must be a MDImage")
    try:
        # Verify the image can be opened; the context manager closes the
        # underlying file handle (a bare Image.open() leaves it open until
        # garbage collection).
        with Image.open(myimage.path):
            pass

        # Construct static URL using collection name and relative path
        static_url = f"/app/static/{myimage.collection.name}/{myimage.rel_path}"

        # Escape the alt text so quotes/angle brackets cannot break out of
        # the HTML attribute.
        safe_alt = html.escape(alt_text or "", quote=True)

        # Create HTML img tag with proper styling
        return f'<img src="{static_url}" alt="{safe_alt}" style="max-width: 100%; height: auto; display: inline-block; margin: 0.5em 0;">'
    except Exception as e:
        debug(f"Error processing image {myimage.path}: {str(e)}")
        return f"Error loading image: {myimage.path}"
|
||||
|
||||
|
||||
def process_images(page: MDPage, collections: MDCollections) -> MDPage:
    """
    Process images in the markdown content while preserving text structure.

    Rewrites every markdown image reference in page.content into an HTML
    <img> tag via process_image, leaving surrounding text untouched.

    Args:
        page: The MDPage object containing markdown content
        collections: The MDCollections object containing image references

    Returns:
        MDPage: The processed page with images displayed

    Raises:
        TypeError: on wrong argument types.
    """
    if not isinstance(page, MDPage):
        raise TypeError("page must be a MDPage")
    if not isinstance(collections, MDCollections):
        raise TypeError("collections must be a MDCollections")

    debug(f"Processing images for page: {page.name}")
    debug(f"Content length before processing: {len(page.content)} characters")

    # Match markdown image syntax: ![alt](path)
    link_pattern = r'!\[(.*?)\]\((.*?)\)'

    def replace_link(match):
        # Turn one matched image link into an <img> tag, or an error string
        # when resolution fails (errors are embedded in the output, never
        # raised, so one bad image cannot break the whole page).
        alt_text = match.group(1)
        image_path = match.group(2)

        # Split path into collection and image name
        try:
            parts = image_path.split("__", 1)
            if len(parts) != 2:
                debug(f"Invalid image path format (missing __): {image_path}")
                return f"Invalid image path format: {image_path}"

            # NOTE(review): the segment BEFORE "__" is treated as the image
            # name and the one AFTER as the collection -- the opposite of
            # parse_page_parameter's "collection__file" order; confirm this
            # is intentional for image links.
            image_name, collection_name = parts
            debug(f"Found image link, will now check - Alt text: {alt_text}, Image: '{image_name}', Collection: '{collection_name}'")

            # Get the image from collections using the path
            myimage = collections.image_get(image_name, collection_name)
            return process_image(myimage, alt_text if alt_text else None)
        except ValueError as e:
            debug(f"Image not found in collection: {image_path}.\n{e}")
            return f"Image not found: {image_path}"
        except Exception as e:
            debug(f"Error processing image {image_path}: {str(e)}")
            return f"Error processing image: {image_path}"

    # Process all image links while preserving surrounding text.
    # (content_ appears to be the writable backing field for .content --
    # TODO confirm against MDPage.)
    page.content_ = re.sub(link_pattern, replace_link, page.content)

    debug("Image processing complete")

    return page
|
80
_archive/lib/web/mdserver/process_markdown.py
Normal file
80
_archive/lib/web/mdserver/process_markdown.py
Normal file
@ -0,0 +1,80 @@
|
||||
import os
|
||||
import re
|
||||
import streamlit as st
|
||||
from PIL import Image # Pillow package provides PIL
|
||||
from typing import Optional, List, Tuple, TYPE_CHECKING
|
||||
from .macro_sl import process_streamlit_blocks
|
||||
from .macro_chart import process_markdown_echarts
|
||||
from .macro_mermaid import process_markdown_mermaid
|
||||
from .macro_slides import process_markdown_slides
|
||||
from .macro_sl import process_streamlit_blocks
|
||||
from .macro_links import process_links
|
||||
from .process_images import process_images
|
||||
from mdcollections.tools import name_fix, is_image
|
||||
from mdcollections.base_types import MDPage, MDCollection
|
||||
from mdcollections.mdcollections import MDCollections
|
||||
from .tools import debug,rewrite_summary_links
|
||||
|
||||
|
||||
def summary_load(collection: MDCollection) -> MDPage:
    """Return the collection's summary.md page, falling back to its index.

    When summary.md exists, its link targets are rewritten so the first
    path segment becomes a collection prefix.

    Args:
        collection: the collection to load the summary from.

    Raises:
        TypeError: when collection is not an MDCollection.
    """
    if not isinstance(collection, MDCollection):
        raise TypeError("collection must be a MDCollection")
    try:
        summary = collection.page_get("summary.md")
        # need to rewrite the first part of path as collection, might change in future
        summary.content_ = rewrite_summary_links(summary.content_)
        return summary
    except ValueError:
        # No summary.md present -- use the generated index page instead.
        return collection.index_page()
|
||||
|
||||
def process_markdown(page: MDPage, collections: MDCollections) -> MDPage:
    """Process markdown content and handle images, links, and streamlit code blocks.

    Runs the macro pipeline (echarts, mermaid, slides, streamlit blocks,
    links, images) over the page, then renders any remaining markdown.

    Args:
        page: The MDPage object to process
        collections: The MDCollections object containing all collections

    Returns:
        MDPage: the same page object after all processors have run.

    Raises:
        TypeError: on wrong argument types.
        RuntimeError: when the page was already processed.
    """
    if not isinstance(page, MDPage):
        raise TypeError("page must be a MDPage")
    if not isinstance(collections, MDCollections):
        raise TypeError("collections must be a MDCollections")

    debug(f"Processing markdown for page: {page.name} in collection: {page.collection.name}\nInitial content length: {len(page.content)} characters")

    if page.processed:
        # BUG FIX: the original built this exception but never raised it,
        # silently allowing double processing.
        raise RuntimeError(f"double processing of page {page.name}")

    # Process special blocks with page and md_server arguments
    page = process_markdown_echarts(page)

    page = process_markdown_mermaid(page)

    page = process_markdown_slides(page)

    page = process_streamlit_blocks(page)

    # Pass the debug flag to process_links
    page = process_links(page=page, collections=collections)

    page = process_images(page=page, collections=collections)

    # Process remaining content
    if page.content.strip():
        debug(f"Rendering final markdown content (length: {len(page.content)} characters)")
        st.markdown(page.content, unsafe_allow_html=True)
    else:
        debug("No content to render after processing")

    return page
|
||||
|
||||
def parse_page_parameter(page_param: str) -> Tuple[Optional[str], str]:
    """Split a page parameter into (collection, filename).

    A "collection__file" value yields ("collection", "file"); a value
    without the "__" separator yields (None, value).
    """
    collection, sep, filename = page_param.partition('__')
    if sep:
        return collection, filename
    return None, page_param
|
5
_archive/lib/web/mdserver/requirements.txt
Normal file
5
_archive/lib/web/mdserver/requirements.txt
Normal file
@ -0,0 +1,5 @@
|
||||
streamlit>=1.24.0
|
||||
pandas>=1.5.0
|
||||
numpy>=1.24.0
|
||||
ipython>=8.0.0
|
||||
Pillow>=10.0.0
|
43
_archive/lib/web/mdserver/tools.py
Normal file
43
_archive/lib/web/mdserver/tools.py
Normal file
@ -0,0 +1,43 @@
|
||||
import re
|
||||
import streamlit as st
|
||||
|
||||
def strip_ansi_codes(text):
    """Strip ANSI terminal escape sequences (colors, cursor moves) from text."""
    # Matches single-character escapes (ESC @ .. ESC _) as well as full
    # CSI sequences (ESC [ parameters intermediates final-byte).
    pattern = r'\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])'
    return re.sub(pattern, '', text)
|
||||
|
||||
|
||||
|
||||
def debug(message: str):
    """Print a debug message to stdout when the UI debug toggle is on.

    Args:
        message: The debug message to display (ANSI codes are stripped).
    """
    if not st.session_state.debug_mode:
        return
    # Rendering via st.code(message, language="text") was tried previously;
    # plain stdout keeps the page uncluttered.
    print(strip_ansi_codes(message))
|
||||
|
||||
|
||||
def rewrite_summary_links(text: str) -> str:
    """Rewrite links so the first path segment becomes a collection prefix.

    Every parenthesized link target like "(path/to/resource)" has its
    FIRST slash replaced by a double underscore, giving
    "(path__to/resource)". Processing is done line by line so a
    parenthesized span crossing a newline is left untouched.

    Args:
        text: markdown text to rewrite.

    Returns:
        str: the rewritten text, lines re-joined with "\n".
    """
    # Matches a parenthesized link target; applied per line below.
    pattern = r'\(([^)]+)\)'

    def replace_first_slash(match):
        # Replace only the first slash: "a/b/c" -> "a__b/c".
        return '({})'.format(match.group(1).replace('/', '__', 1))

    # The module-level `import re` already covers this helper; the original
    # redundantly re-imported it inside the function. The manual
    # append-loop is replaced by a generator expression.
    return '\n'.join(
        re.sub(pattern, replace_first_slash, line) for line in text.splitlines()
    )
|
188
_archive/openapi/generator/model_generator.py
Normal file
188
_archive/openapi/generator/model_generator.py
Normal file
@ -0,0 +1,188 @@
|
||||
from typing import Dict, List, Set, Any
|
||||
|
||||
class VlangCodeGenerator:
|
||||
pass
|
||||
|
||||
class ModelGenerator:
|
||||
def __init__(
    self, spec: Dict[str, Any], lang_code_generator: str
) -> None:
    """Build a model generator over a parsed spec.

    Args:
        spec: the parsed specification mapping (expects a
            components/schemas section).
        lang_code_generator: target language identifier (only "vlang"
            is currently supported).
    """
    self.spec = spec
    self.lang_code_generator = lang_code_generator
    # BUG FIX: these initializations were commented out, but
    # generate_models() iterates self.ordered_objects (and the schema
    # walk records into processed_objects / used_names) -- without them
    # it raises AttributeError.
    self.processed_objects: Dict[str, Dict[str, str]] = {}
    self.ordered_objects: List[str] = []
    self.used_names: Set[str] = set()
|
||||
|
||||
def generate_models(self):
    """Generate target-language type definitions for every schema in the spec.

    Returns:
        str: concatenated generated type definitions, separated by blank
        lines.

    Raises:
        ValueError: when the language is unsupported or the spec has no
            components/schemas.
    """
    if self.lang_code_generator != "vlang":
        raise ValueError('Unsupported language.')

    if not self.spec.get('components'):
        raise ValueError("No components found in spec")

    components = self.spec['components']

    if not components.get('schemas'):
        raise ValueError("No schemas found in components")

    schemas = components['schemas']
    schemas_path = ["components", "schemas"]
    for name, schema in schemas.items():
        # NOTE(review): jsonschema_to_type is commented out elsewhere in
        # this file -- as written this call raises AttributeError at
        # runtime; confirm whether that method should be restored.
        self.jsonschema_to_type(
            path=schemas_path + [name],
            jsonschema=schema,
        )

    # Concatenate every recorded object, skipping empty placeholder
    # entries (used to break cyclic dependencies during the walk).
    objects_code = ""
    for val in self.ordered_objects:
        if val == "":
            continue
        objects_code = f"{objects_code}{val}\n\n"

    # NOTE(review): leftover debug print -- consider removing.
    print(f'debugzo4 {objects_code}')
    return objects_code
|
||||
|
||||
# def jsonschema_to_type(
|
||||
# self, path: List[str], jsonschema: SchemaObject | ReferenceObject
|
||||
# ) -> str:
|
||||
# if isinstance(jsonschema, ReferenceObject):
|
||||
# ref: str = jsonschema.ref
|
||||
|
||||
# ref_schema = self.spec.ref_to_schema(ref)
|
||||
# ref_path = ref.split("/")[1:]
|
||||
|
||||
# if isinstance(ref_schema, ContentDescriptorObject):
|
||||
# # TODO: implement
|
||||
# raise Exception("unimplemented")
|
||||
# # return self.content_descriptor_to_type(ref_path, ref_schema)
|
||||
|
||||
# return self.jsonschema_to_type(ref_path, ref_schema)
|
||||
|
||||
# path_str = "/".join([item.lower() for item in path])
|
||||
# if path_str in self.processed_objects:
|
||||
# return self.processed_objects[path_str]["name"]
|
||||
|
||||
# type_name = self.type_name_from_path(path)
|
||||
|
||||
# description = getattr(jsonschema, 'description', None)
|
||||
# if jsonschema.enum:
|
||||
# enum = jsonschema.enum
|
||||
# type_code = self.lang_code_generator.generate_enum(enum, type_name)
|
||||
# if self.lang_code_generator.is_primitive(type_code):
|
||||
# return type_code
|
||||
|
||||
# self.add_object(path_str, type_code, type_name)
|
||||
# return type_name
|
||||
|
||||
# if jsonschema.type:
|
||||
# match jsonschema.type:
|
||||
# case "string":
|
||||
# return self.lang_code_generator.string_primitive()
|
||||
|
||||
# case "integer":
|
||||
# return self.lang_code_generator.integer_primitive()
|
||||
|
||||
# case "number":
|
||||
# return self.lang_code_generator.number_primitive()
|
||||
|
||||
# case "array":
|
||||
# if isinstance(jsonschema.items, List):
|
||||
# raise Exception(
|
||||
# "array of different item types is not supported"
|
||||
# )
|
||||
|
||||
# item_type_name = self.jsonschema_to_type(
|
||||
# path + ["item"], jsonschema.items
|
||||
# )
|
||||
# return self.lang_code_generator.array_of_type(
|
||||
# item_type_name
|
||||
# )
|
||||
|
||||
# case "boolean":
|
||||
# return self.lang_code_generator.bool_primitive()
|
||||
|
||||
# case "object":
|
||||
# # to prevent cyclic dependencies
|
||||
# self.add_object(path_str, "", type_name)
|
||||
|
||||
# properties: Dict[str, PropertyInfo] = {}
|
||||
# for (
|
||||
# property_name,
|
||||
# property_schema,
|
||||
# ) in jsonschema.properties.items():
|
||||
# schema = property_schema
|
||||
# new_path = path + ["properties", property_name]
|
||||
# if isinstance(property_schema, ReferenceObject):
|
||||
# schema = self.spec.ref_to_schema(
|
||||
# property_schema.ref
|
||||
# )
|
||||
# new_path = property_schema.ref.split("/")[1:]
|
||||
|
||||
# property_info = PropertyInfo(
|
||||
# name=property_name,
|
||||
# type_name=self.jsonschema_to_type(new_path, schema),
|
||||
# description=schema.description,
|
||||
# example=schema.example,
|
||||
# )
|
||||
|
||||
# properties[property_name] = property_info
|
||||
|
||||
# type_code = self.lang_code_generator.generate_object(
|
||||
# type_name, properties
|
||||
# )
|
||||
# self.add_object(path_str, type_code, type_name)
|
||||
# return type_name
|
||||
|
||||
# case "null":
|
||||
# return self.lang_code_generator.null_primitive()
|
||||
|
||||
# case _:
|
||||
# raise Exception(f"type {jsonschema.type} is not supported")
|
||||
|
||||
# if jsonschema.anyOf:
|
||||
# type_names = []
|
||||
# for i, item in enumerate(jsonschema.anyOf):
|
||||
# type_names.append(
|
||||
# self.jsonschema_to_type(path + [f"anyOf{i}"], item)
|
||||
# )
|
||||
|
||||
# return self.lang_code_generator.generate_multitype(type_names)
|
||||
# # self.add_object(path_str, type_code, type_code)
|
||||
# # return type_code
|
||||
|
||||
# elif jsonschema.oneOf:
|
||||
# type_names = []
|
||||
# for i, item in enumerate(jsonschema.oneOf):
|
||||
# type_names.append(
|
||||
# self.jsonschema_to_type(path + [f"oneOf{i}"], item)
|
||||
# )
|
||||
|
||||
# return self.lang_code_generator.generate_multitype(type_names)
|
||||
# # self.add_object(path_str, type_code, type_code)
|
||||
# # return type_code
|
||||
|
||||
# elif jsonschema.allOf:
|
||||
# return self.lang_code_generator.encapsulate_types(jsonschema.allOf)
|
||||
# # self.add_object(path_str, type_code, type_code)
|
||||
# # return type_name
|
||||
|
||||
# raise Exception(f"type {jsonschema.type} is not supported")
|
||||
|
||||
# def add_object(self, path_str: str, type_code: str, type_name: str):
|
||||
# self.used_names.add(type_name)
|
||||
# self.processed_objects[path_str] = {
|
||||
# "code": type_code,
|
||||
# "name": type_name,
|
||||
# }
|
||||
# print(f'debugzo21 {self.processed_objects[path_str]}')
|
||||
# self.ordered_objects.append(type_code)
|
||||
|
||||
# def type_name_from_path(self, path: List[str]) -> str:
|
||||
# type_name = ""
|
||||
# for item in reversed(path):
|
||||
# type_name += item.title() if item.islower() else item
|
||||
# if type_name not in self.used_names:
|
||||
# return type_name
|
||||
|
||||
# raise Exception(f"failed to generate unique name from path: {path}")
|
@ -0,0 +1,9 @@
|
||||
pub enum {{ type_name }}{
|
||||
{% for elem in enum -%}
|
||||
{% if is_integer -%}
|
||||
{{ number_to_words(elem) }} = {{ elem }}
|
||||
{% else -%}
|
||||
{{ elem }}
|
||||
{% endif -%}
|
||||
{% endfor %}
|
||||
}
|
@ -0,0 +1,77 @@
|
||||
pub struct {{ actor_executor_name }}{
|
||||
pub mut:
|
||||
db &backend.Backend
|
||||
redis &redisclient.Redis
|
||||
}
|
||||
|
||||
pub fn (mut executor {{ actor_executor_name }}) execute(rpc_msg_id string, rpc_msg_method string, rpc_msg_params_str string) {
|
||||
raw_params := json2.raw_decode(rpc_msg_params_str) or{
|
||||
executor.return_error(rpc_msg_id, jsonrpc.invalid_params)
|
||||
return
|
||||
}
|
||||
|
||||
params_arr := raw_params.arr()
|
||||
|
||||
match rpc_msg_method {
|
||||
{%- for method in methods %}
|
||||
'{{method.name}}' {
|
||||
{%- for param in method.params %}
|
||||
{%- if generator.is_primitive(generator.get_param_type(method.name, param))%}
|
||||
{{param.name}} := params_arr[{{loop.index0}}] as {{generator.get_param_type(method.name, param)}}
|
||||
{%- else %}
|
||||
{{param.name}} := json.decode({{generator.get_param_type(method.name, param)}}, params_arr[{{loop.index0}}].json_str()) or {
|
||||
executor.return_error(rpc_msg_id, jsonrpc.invalid_request)
|
||||
return
|
||||
}
|
||||
{%- endif %}
|
||||
{%- endfor %}
|
||||
|
||||
{%- if generator.get_method_return_type(method) == 'none' %}
|
||||
executor.{{method.name}}_internal({{generator.get_method_params_as_args(method)}}) or {
|
||||
executor.return_error(rpc_msg_id, jsonrpc.InnerJsonRpcError{
|
||||
code: 32000
|
||||
message: '${err}'
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
response := jsonrpc.JsonRpcResponse[string]{
|
||||
jsonrpc: '2.0.0'
|
||||
id: rpc_msg_id
|
||||
result: ''
|
||||
}
|
||||
{%- else %}
|
||||
result := executor.{{method.name}}_internal({{generator.get_method_params_as_args(method)}}) or {
|
||||
executor.return_error(rpc_msg_id, jsonrpc.InnerJsonRpcError{
|
||||
code: 32000
|
||||
message: '${err}'
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
response := jsonrpc.JsonRpcResponse[{{generator.get_method_return_type(method)}}]{
|
||||
jsonrpc: '2.0.0'
|
||||
id: rpc_msg_id
|
||||
result: result
|
||||
}
|
||||
{%- endif %}
|
||||
|
||||
// put response in response queue
|
||||
executor.redis.lpush(rpc_msg_id, response.to_json()) or {
|
||||
println('failed to push response for ${rpc_msg_id} to redis queue: ${err}')
|
||||
}
|
||||
}
|
||||
{%- endfor %}
|
||||
else {
|
||||
executor.return_error(rpc_msg_id, jsonrpc.method_not_found)
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn (mut executor {{actor_executor_name}}) return_error(rpc_msg_id string, error jsonrpc.InnerJsonRpcError){
|
||||
response := jsonrpc.new_jsonrpcerror(rpc_msg_id, error)
|
||||
executor.redis.lpush(rpc_msg_id, response.to_json()) or {
|
||||
println('failed to push response for ${rpc_msg_id} to redis queue: ${err}')
|
||||
}
|
||||
}
|
@ -0,0 +1,50 @@
|
||||
struct Handler {
|
||||
pub mut:
|
||||
db &backend.Backend
|
||||
redis &redisclient.Redis
|
||||
{% for actor in actors %}
|
||||
{{actor}}_executor {{get_actor_executor_name(actor)}}
|
||||
{%- endfor %}
|
||||
}
|
||||
|
||||
pub fn new(db_config backend.BackendConfig, redis_addr string) !Handler{
|
||||
db := backend.new(db_config)!
|
||||
mut redis_client := redisclient.new([redis_addr])!
|
||||
redis_client.selectdb(0)!
|
||||
|
||||
return Handler{
|
||||
db: &db
|
||||
redis: &redis_client
|
||||
{%- for actor in actors %}
|
||||
{{actor}}_executor: {{get_actor_executor_name(actor)}}{
|
||||
db: &db
|
||||
redis: &redis_client
|
||||
}
|
||||
{%- endfor %}
|
||||
}
|
||||
}
|
||||
|
||||
// handle handles an incoming JSON-RPC encoded message and returns an encoded response
|
||||
pub fn (mut handler Handler) handle(id string, method string, params_str string) {
|
||||
actor := method.all_before('.')
|
||||
method_name := method.all_after('.')
|
||||
|
||||
match actor {
|
||||
{%- for actor in actors %}
|
||||
'{{ actor }}' {
|
||||
spawn (&handler.{{actor}}_executor).execute(id, method_name, params_str)
|
||||
}
|
||||
{%- endfor %}
|
||||
else {
|
||||
handler.return_error(id, jsonrpc.method_not_found)
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn (mut handler Handler) return_error(rpc_msg_id string, error jsonrpc.InnerJsonRpcError){
|
||||
response := jsonrpc.new_jsonrpcerror(rpc_msg_id, error)
|
||||
handler.redis.lpush(rpc_msg_id, response.to_json()) or {
|
||||
println('failed to push response for ${rpc_msg_id} to redis queue: ${err}')
|
||||
}
|
||||
}
|
@ -0,0 +1,31 @@
|
||||
module myhandler
|
||||
|
||||
import x.json2
|
||||
import rand
|
||||
import freeflowuniverse.crystallib.baobab.backend
|
||||
|
||||
fn test_handler(){
|
||||
db_config := backend.BackendConfig{
|
||||
name: 'myhandler'
|
||||
secret: 'secret'
|
||||
reset: true
|
||||
db_type: .postgres
|
||||
}
|
||||
|
||||
mut handler := new(db_config, '127.0.0.1:6379')!
|
||||
{% for method_name in method_names %}
|
||||
do_request(mut handler, '{{method_name}}')!
|
||||
{%- endfor %}
|
||||
}
|
||||
|
||||
fn do_request(mut handler Handler, method_name string) ! {
|
||||
// TODO: edit input parameters
|
||||
mut params := []json2.Any{}
|
||||
params << "objid"
|
||||
params << "blabla_name"
|
||||
params_str := json2.Any(params).json_str()
|
||||
|
||||
id := rand.string(6)
|
||||
handler.handle(rand.string(6), method_name, json2.Any(params).json_str())
|
||||
println('request id: ${id}')
|
||||
}
|
@ -0,0 +1,7 @@
|
||||
pub fn (mut executor {{ actor_executor_name }}) {{function_name}}({{method_params}}) !{{return_type}}{
|
||||
// context allows us to see who the user is and which groups the user is
|
||||
// context also gives a logging feature
|
||||
// context is linked to 1 circle
|
||||
// context is linked to a DB (OSIS)
|
||||
panic('implement')
|
||||
}
|
@ -0,0 +1,28 @@
|
||||
pub fn (mut executor {{ actor_executor_name }}) {{variable_name}}_get_internal(id string) !{{type_name}}{
|
||||
json_str := executor.db.indexer.get_json(id, backend.RootObject{
|
||||
name: '{{type_name}}'
|
||||
})!
|
||||
|
||||
return json.decode({{type_name}}, json_str)!
|
||||
}
|
||||
|
||||
pub fn (mut executor {{ actor_executor_name }}) {{variable_name}}_set_internal({{variable_name}} {{type_name}}) !{
|
||||
if {{variable_name}}.oid != ''{
|
||||
executor.db.indexer.set(backend.RootObject{
|
||||
id: {{variable_name}}.oid
|
||||
name: '{{type_name}}'
|
||||
})!
|
||||
}
|
||||
|
||||
executor.db.indexer.new(backend.RootObject{
|
||||
name: '{{type_name}}'
|
||||
})!
|
||||
}
|
||||
|
||||
pub fn (mut executor {{ actor_executor_name }}) {{variable_name}}_delete_internal(id string) !{
|
||||
executor.db.indexer.delete(id, backend.RootObject{
|
||||
name: '{{type_name}}'
|
||||
})!
|
||||
}
|
||||
|
||||
|
@ -0,0 +1,5 @@
|
||||
pub struct {{method_param_struct_name}}{
|
||||
{% for param_name, param_type in params.items()%}
|
||||
{{param_name}} {{param_type}}
|
||||
{%- endfor %}
|
||||
}
|
@ -0,0 +1,75 @@
|
||||
{% if method_example -%}
|
||||
/*
|
||||
Example:
|
||||
{{ method_example }}
|
||||
*/
|
||||
{% endif -%}
|
||||
|
||||
{% if method_description -%}
|
||||
/*
|
||||
{{ method_description }}
|
||||
*/
|
||||
{% endif -%}
|
||||
pub fn {{ function_name }}({{ vlang_code_generator.get_method_params(method_params) }}) {{ method_result }}{
|
||||
mut conn := httpconnection.new(
|
||||
name: 'openrpc_client'
|
||||
url: '{{ base_url }}'
|
||||
)!
|
||||
|
||||
mut params := map[string]json2.Any{}
|
||||
{% for param_name, param_type in method_params.items() -%}
|
||||
{% if vlang_code_generator.is_primitive(param_type) %}
|
||||
params["{{ param_name }}"] = {{ param_name }}
|
||||
{% elif vlang_code_generator.is_vlang_array(param_type) %}
|
||||
mut any_arr := []json2.Any{}
|
||||
for item in {{ param_name }}{
|
||||
{% if vlang_code_generator.is_primitive(param_type[2:]) %}
|
||||
any_arr << item
|
||||
{% else %}
|
||||
any_arr << json2.raw_decode(json2.encode(item))!
|
||||
{% endif %}
|
||||
}
|
||||
params["{{ param_name }}"] = json2.Any(any_arr)
|
||||
{%else %}
|
||||
params["{{ param_name }}"] = json2.raw_decode(json2.encode({{ param_name }}))!
|
||||
{% endif %}
|
||||
{% endfor -%}
|
||||
|
||||
mut payload := map[string]json2.Any{}
|
||||
payload['jsonrpc'] = "2.0"
|
||||
payload['id'] = 0
|
||||
payload['method'] = '{{ method_name }}'
|
||||
payload['params'] = params
|
||||
|
||||
response := conn.send(method: .post, data: json2.encode(payload){% if url_path -%}, prefix: '{{ url_path }}' {% endif -%})!
|
||||
if !response.is_ok() {
|
||||
return error('failed to make rpc request: (${response.code}) ${response.data}')
|
||||
}
|
||||
|
||||
{% if return_type != 'none' %}
|
||||
mp := json2.raw_decode(response.data)!.as_map()
|
||||
res := mp['result'] or {
|
||||
return error('invalid jsonrpc result: ${response.data}')
|
||||
}
|
||||
|
||||
if res is json2.Null{
|
||||
return error('not found')
|
||||
}
|
||||
|
||||
{% if vlang_code_generator.is_primitive(return_type) %}
|
||||
return res as {{return_type}}
|
||||
{% elif vlang_code_generator.is_vlang_array(return_type) %}
|
||||
mut res_arr := {{return_type}}
|
||||
for item in res.arr() {
|
||||
{% if vlang_code_generator.is_primitive(return_type[2:]) %}
|
||||
res_arr << item as {{return_type}}
|
||||
{% else %}
|
||||
res_arr << json2.decode[{{return_type[2:]}}](item.json_str())!
|
||||
{% endif %}
|
||||
}
|
||||
return res_arr
|
||||
{%else %}
|
||||
return json2.decode[{{return_type}}](res.json_str())!
|
||||
{% endif -%}
|
||||
{% endif %}
|
||||
}
|
@ -0,0 +1,5 @@
|
||||
module {{module_name}}
|
||||
{% for item in imports %}
|
||||
import {{item}}
|
||||
{%- endfor %}
|
||||
|
@ -0,0 +1,10 @@
|
||||
@[params]
|
||||
pub struct {{ struct_name }}{
|
||||
pub mut:
|
||||
{%- for property_name, property_info in properties.items() %}
|
||||
{%- if property_info.description %}
|
||||
// {{ property_info.description }}
|
||||
{%- endif %}
|
||||
{{ property_name }} {{ property_info.type_name }}
|
||||
{%- endfor %}
|
||||
}
|
231
_archive/openapi/generator/server/vlang/vlang.py
Normal file
231
_archive/openapi/generator/server/vlang/vlang.py
Normal file
@ -0,0 +1,231 @@
|
||||
from openapi_python_client.schema import OpenAPI, Schema, Reference
|
||||
from jinja2 import Environment, FileSystemLoader
|
||||
from typing import Dict, Any
|
||||
|
||||
import os
|
||||
|
||||
|
||||
# Jinja templates are resolved relative to this file's directory so the
# generator works regardless of the current working directory.
script_dir = os.path.dirname(os.path.abspath(__file__))
env = Environment(loader=FileSystemLoader(script_dir))
|
||||
|
||||
class VlangCodeGenerator:
|
||||
def __init__(self, python_code: OpenAPI, output_dir: str) -> None:
    """
    Args:
        python_code: parsed OpenAPI document to translate to V.
        output_dir: directory where the generated V file will be written.
    """
    self.python_code = python_code
    self.output_dir = output_dir
    # Jinja template used to render a single V struct definition.
    self.struct_template = env.get_template("templates/struct.jinja")
|
||||
def generate(self):
    """
    Main generation method to create V code.

    Renders all component schemas to V structs and writes them to
    <output_dir>/generated.v. Method generation is not implemented yet.
    """
    # Ensure the output directory exists
    os.makedirs(self.output_dir, exist_ok=True)

    structs = self._generate_structs()
    # methods = self._generate_methods()  # TODO: not implemented yet

    # Combine structs (and, later, methods) into one file.
    vlang_code = structs
    # os.path.join instead of f-string concatenation for portability.
    output_file = os.path.join(self.output_dir, "generated.v")

    # Write to file
    with open(output_file, "w") as file:
        file.write(vlang_code)
    print(f"Vlang code generated at {output_file}")
||||
|
||||
def _generate_struct(self, struct_name: str, scheme: Schema | Reference) -> str:
    """Render one schema as a V struct, recursing into nested objects.

    Args:
        struct_name: name for the generated struct.
        scheme: the schema whose properties are rendered.
            (NOTE(review): despite the annotation, a Reference has no
            .properties -- confirm callers only pass concrete Schema
            objects here.)

    Returns:
        str: V source for this struct plus any nested structs generated
        along the way (nested structs are emitted before their parent).
    """
    properties = {}
    code = ""

    for field_name, field in scheme.properties.items():  # type: ignore
        # assumes self._convert_type maps JSON-schema type names to V
        # type names -- definition not visible in this file; TODO confirm.
        v_type = self._convert_type(field.type)  # type: ignore

        if field.type == 'object':  # type: ignore
            # Capitalize each part of the field name and create a nested struct name
            nested_struct_name = ''.join(part.capitalize() for part in field_name.split("_"))

            # Generate the struct for the nested object
            code += self._generate_struct(struct_name=nested_struct_name, scheme=field)

            # Update v_type to the newly generated nested struct name
            v_type = nested_struct_name

        # Update the properties dictionary with type name and description
        properties[field_name] = {
            'type_name': v_type,
            'description': field.description  # type: ignore
        }

    code += "\n"
    code += self.struct_template.render(
        struct_name=struct_name,
        properties= properties  # type: ignore
    )
    code += "\n"

    return code
||||
|
||||
def _generate_structs(self) -> str:
    """
    Generate V structs from OpenAPI components with support for nested
    objects and arrays.

    :raises ValueError: if the spec has no components or no schemas.
    :return: concatenated V source code for all component schemas.
    """
    components = self.python_code.components
    if not components:
        raise ValueError("No components found in spec")

    if not components.schemas:
        raise ValueError("No schemas found in components")

    # One struct (plus any nested structs) per component schema.
    return "".join(
        self._generate_struct(struct_name=name, scheme=schema)
        for name, schema in components.schemas.items()
    )
# structs_code = []
|
||||
# for schema_name, schema in self.python_code.components.schemas.items():
|
||||
# fields = []
|
||||
# for field_name, field in schema.properties.items(): # type: ignore
|
||||
# if field.type == "object": # type: ignore
|
||||
# # Generate a nested struct
|
||||
# parts = field_name.split("_")
|
||||
# nested_struct_name = ""
|
||||
# for part in parts:
|
||||
# nested_struct_name += part.capitalize()
|
||||
# nested_struct = self._generate_struct_from_object(nested_struct_name, field) # type: ignore
|
||||
# structs_code.append(nested_struct)
|
||||
# fields.append(f"\t{field_name} {nested_struct_name}")
|
||||
# print(f"Generated struct for {nested_struct_name}")
|
||||
# elif field.type == "array": # type: ignore
|
||||
# # Handle arrays with proper type conversion for items
|
||||
# item_type = self._convert_type(field.items.type) # type: ignore
|
||||
# fields.append(f"\t{field_name} []{item_type}")
|
||||
# else:
|
||||
# # Convert JSON schema type to V type
|
||||
# v_type = self._convert_type(field.type) # type: ignore
|
||||
# fields.append(f"\t{field_name} {v_type}")
|
||||
|
||||
# # Construct struct
|
||||
# struct_code = f"pub struct {schema_name} {{\n" + "\n".join(fields) + "\n}"
|
||||
# structs_code.append(struct_code)
|
||||
# print(f"Generated struct for {schema_name}")
|
||||
|
||||
# return "\n\n".join(structs_code)
|
||||
|
||||
# def _generate_struct_from_object(self, struct_name: str, schema: dict) -> str:
|
||||
# """
|
||||
# Generate a nested struct from an object schema.
|
||||
# """
|
||||
# fields = []
|
||||
# for field_name, field in schema.properties.items(): # type: ignore
|
||||
# v_type = self._convert_type(field.type) # type: ignore
|
||||
# fields.append(f"\t{field_name} {v_type}")
|
||||
|
||||
# return f"struct {struct_name} {{\n" + "\n".join(fields) + "\n}"
|
||||
|
||||
# def _generate_methods(self) -> str:
|
||||
# """
|
||||
# Generate V methods based on OpenAPI paths and operations.
|
||||
# """
|
||||
# if not self.python_code.paths:
|
||||
# raise ValueError("No paths found in spec")
|
||||
|
||||
# methods_code = []
|
||||
# for path, path_item in self.python_code.paths.items():
|
||||
# # Explicitly check for HTTP method attributes in PathItem
|
||||
# for http_method in ["get", "post", "put", "delete", "patch", "options", "head"]:
|
||||
# operation = getattr(path_item, http_method, None)
|
||||
# if operation:
|
||||
# # Generate method name and parameters
|
||||
# method_name = self._generate_method_name(http_method, path)
|
||||
# parameters = self._generate_method_parameters(operation.parameters)
|
||||
# request_body = self._generate_request_body(operation.request_body)
|
||||
# response_type = self._generate_response_type(operation.responses)
|
||||
|
||||
# # Combine method arguments
|
||||
# method_arguments = parameters
|
||||
# if request_body:
|
||||
# method_arguments += f", {request_body}" if parameters else request_body
|
||||
|
||||
# # Generate the method code
|
||||
# method_code = f"fn {method_name}({method_arguments}) {response_type} {{\n"
|
||||
# method_code += f"\t// TODO: Implement the {http_method.upper()} request to {path}\n"
|
||||
# method_code += "\t// Use the generated structs for request/response bodies\n"
|
||||
# method_code += "}\n"
|
||||
# methods_code.append(method_code)
|
||||
|
||||
# print(f"Generated method for {http_method.upper()} {path}")
|
||||
|
||||
# return "\n\n".join(methods_code)
|
||||
|
||||
# def _generate_method_name(self, http_method: str, path: str) -> str:
|
||||
# """
|
||||
# Generate a method name from the HTTP method and path.
|
||||
# """
|
||||
# # Remove leading/trailing slashes and replace `/` with `_`
|
||||
# sanitized_path = path.strip("/").replace("/", "_").replace("{", "").replace("}", "")
|
||||
# return f"{http_method.lower()}_{sanitized_path}"
|
||||
|
||||
# def _generate_method_parameters(self, parameters) -> str:
|
||||
# if not parameters:
|
||||
# return ""
|
||||
|
||||
# param_list = []
|
||||
# for param in parameters:
|
||||
# param_name = param.name
|
||||
# param_schema = getattr(param, "schema", None)
|
||||
# print('param_name: ', param_name)
|
||||
# print('param_schema: ', param_schema)
|
||||
# # if param_schema and param_schema.type:
|
||||
# # param_type = self._convert_type(param_schema.type)
|
||||
# # param_list.append(f"{param_name} {param_type}")
|
||||
|
||||
# return ", ".join(param_list)
|
||||
|
||||
|
||||
# def _generate_request_body(self, request_body) -> str:
|
||||
# """
|
||||
# Generate a function parameter for the request body if present.
|
||||
# """
|
||||
# if not request_body or not request_body.content:
|
||||
# return ""
|
||||
|
||||
# # Assume application/json content type
|
||||
# json_schema = request_body.content.get("application/json")
|
||||
# if not json_schema or not json_schema.schema:
|
||||
# return ""
|
||||
|
||||
# print('body_type: ', json_schema)
|
||||
# # body_type = json_schema.schema.ref.split("/")[-1] # Extract the schema name
|
||||
# return f"body {json_schema}"
|
||||
|
||||
# def _generate_response_type(self, responses) -> str:
|
||||
# """
|
||||
# Determine the return type of the method based on responses.
|
||||
# """
|
||||
# if not responses:
|
||||
# return "void"
|
||||
|
||||
# for status_code, response in responses.items():
|
||||
# if response.content and "application/json" in response.content:
|
||||
# json_schema = response.content["application/json"].schema
|
||||
# print('json_schema: ', json_schema)
|
||||
# # if json_schema and json_schema.ref:
|
||||
# # return json_schema.ref.split("/")[-1] # Extract schema name
|
||||
|
||||
# return "void"
|
||||
|
||||
def _convert_type(self, json_type: str) -> str:
|
||||
"""
|
||||
Map JSON schema types to Vlang types.
|
||||
"""
|
||||
type_mapping = {
|
||||
"string": "string",
|
||||
"integer": "int",
|
||||
"number": "f64",
|
||||
"boolean": "bool",
|
||||
"array": "[]",
|
||||
}
|
||||
return type_mapping.get(json_type, "string") # Default to `string`
|
||||
|
50
_archive/openapi/openapi.py
Normal file
50
_archive/openapi/openapi.py
Normal file
@ -0,0 +1,50 @@
|
||||
from openapi_python_client.schema import OpenAPI
|
||||
|
||||
import json
|
||||
import yaml
|
||||
|
||||
from generator.server.vlang.vlang import VlangCodeGenerator
|
||||
|
||||
class OpenApiCodeGenerator:
    """Dispatch OpenAPI code generation to a language-specific backend."""

    def __init__(self, lang: str, spec_file: str, output_dir: str):
        """
        :param lang: target language ("vlang" or "python").
        :param spec_file: path to the OpenAPI spec (.json, .yaml or .yml).
        :param output_dir: directory where generated code is written.
        """
        self.lang = lang
        self.spec_file = spec_file
        self.output_dir = output_dir

    def _read_file(self) -> str:
        """
        Read the OpenAPI spec file and return its content as a JSON string.

        :raises ValueError: for unsupported file extensions.
        """
        if self.spec_file.endswith(".json"):
            with open(self.spec_file, "r") as file:
                return file.read()  # Return raw JSON string
        # BUG FIX: also accept the common ".yml" suffix — the repo itself
        # ships a "schema.yml" spec that the old check rejected.
        elif self.spec_file.endswith((".yaml", ".yml")):
            with open(self.spec_file, "r") as file:
                # Convert YAML to JSON string for compatibility
                return json.dumps(yaml.safe_load(file))
        else:
            raise ValueError("Unsupported file format")

    def generate(self):
        """
        Main generation logic for code based on the OpenAPI spec.
        """
        file_content = self._read_file()
        openapi = OpenAPI.model_validate_json(file_content)
        if self.lang == "vlang":
            vlang_code_generator = VlangCodeGenerator(
                python_code=openapi, output_dir=self.output_dir
            )
            vlang_code_generator.generate()
        elif self.lang == "python":
            print("Python code generation not implemented yet.")
||||
if __name__ == "__main__":
    # Ad-hoc manual run: generate V code from a local schema file.
    # NOTE(review): hard-coded developer path — adjust before running.
    s = OpenApiCodeGenerator(
        lang="vlang",
        spec_file="/home/thunder/work/codescalers/github/hero_server_python/lib/openapi/schema.json",
        output_dir="./output"
    )
    s.generate()
136
_archive/openapi/schema.json
Normal file
136
_archive/openapi/schema.json
Normal file
@ -0,0 +1,136 @@
|
||||
{
|
||||
"openapi": "3.0.3",
|
||||
"info": {
|
||||
"title": "User Management API",
|
||||
"version": "1.0.0",
|
||||
"description": "A simple API to manage users"
|
||||
},
|
||||
"servers": [
|
||||
{
|
||||
"url": "https://api.example.com/v1",
|
||||
"description": "Production server"
|
||||
}
|
||||
],
|
||||
"paths": {
|
||||
"/users": {
|
||||
"get": {
|
||||
"summary": "List all users",
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "A list of users",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"type": "array",
|
||||
"items": {
|
||||
"$ref": "#/components/schemas/User"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"post": {
|
||||
"summary": "Create a new user",
|
||||
"requestBody": {
|
||||
"required": true,
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/User"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"responses": {
|
||||
"201": {
|
||||
"description": "User created successfully"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"/users/{userId}": {
|
||||
"get": {
|
||||
"summary": "Get a user by ID",
|
||||
"parameters": [
|
||||
{
|
||||
"name": "userId",
|
||||
"in": "path",
|
||||
"required": true,
|
||||
"schema": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
],
|
||||
"responses": {
|
||||
"200": {
|
||||
"description": "User details",
|
||||
"content": {
|
||||
"application/json": {
|
||||
"schema": {
|
||||
"$ref": "#/components/schemas/User"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"404": {
|
||||
"description": "User not found"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"components": {
|
||||
"schemas": {
|
||||
"User": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "string",
|
||||
"example": "123",
|
||||
"description": "The unique identifier for the user"
|
||||
},
|
||||
"name": {
|
||||
"type": "string",
|
||||
"example": "John Doe"
|
||||
},
|
||||
"email": {
|
||||
"type": "string",
|
||||
"example": "john.doe@example.com"
|
||||
},
|
||||
"user_profile": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"age": {
|
||||
"type": "integer"
|
||||
},
|
||||
"address": {
|
||||
"type": "string"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"required": [
|
||||
"id",
|
||||
"name",
|
||||
"email"
|
||||
]
|
||||
},
|
||||
"UserBalance": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "string",
|
||||
"example": "123",
|
||||
"description": "The unique identifier for the user"
|
||||
},
|
||||
"name": {
|
||||
"type": "string",
|
||||
"example": "John Doe"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
68
_archive/openapi/schema.yml
Normal file
68
_archive/openapi/schema.yml
Normal file
@ -0,0 +1,68 @@
|
||||
openapi: 3.0.3
|
||||
info:
|
||||
title: User Management API
|
||||
version: 1.0.0
|
||||
description: A simple API to manage users
|
||||
servers:
|
||||
- url: https://api.example.com/v1
|
||||
description: Production server
|
||||
paths:
|
||||
/users:
|
||||
get:
|
||||
summary: List all users
|
||||
responses:
|
||||
'200':
|
||||
description: A list of users
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
type: array
|
||||
items:
|
||||
$ref: '#/components/schemas/User'
|
||||
post:
|
||||
summary: Create a new user
|
||||
requestBody:
|
||||
required: true
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/User'
|
||||
responses:
|
||||
'201':
|
||||
description: User created successfully
|
||||
/users/{userId}:
|
||||
get:
|
||||
summary: Get a user by ID
|
||||
parameters:
|
||||
- name: userId
|
||||
in: path
|
||||
required: true
|
||||
schema:
|
||||
type: string
|
||||
responses:
|
||||
'200':
|
||||
description: User details
|
||||
content:
|
||||
application/json:
|
||||
schema:
|
||||
$ref: '#/components/schemas/User'
|
||||
'404':
|
||||
description: User not found
|
||||
components:
|
||||
schemas:
|
||||
User:
|
||||
type: object
|
||||
properties:
|
||||
id:
|
||||
type: string
|
||||
example: '123'
|
||||
name:
|
||||
type: string
|
||||
example: John Doe
|
||||
email:
|
||||
type: string
|
||||
example: john.doe@example.com
|
||||
required:
|
||||
- id
|
||||
- name
|
||||
- email
|
23
_archive/openrpc/__init__.py
Normal file
23
_archive/openrpc/__init__.py
Normal file
@ -0,0 +1,23 @@
|
||||
from heroserver.openrpc.factory import openrpc_dict, openrpc_spec, openrpc_spec_write
|
||||
from heroserver.openrpc.model.openrpc_spec import OpenRPCSpec
|
||||
|
||||
|
||||
def init_openrpc_dict(path: str = "") -> dict:
    """Parse the specs at *path* and return the OpenRPC spec as a dict.

    Thin wrapper around :func:`openrpc_dict`.
    """
    return openrpc_dict(path=path)
||||
def init_openrpc_spec_write(path: str = "", dest: str = "") -> str:
    """
    parse & write the specs to the destination, the path will be ${destination}/openrpc_spec.json" and .../openrpc_spec.yaml"

    Thin wrapper around :func:`openrpc_spec_write`; returns the JSON file path.
    """
    return openrpc_spec_write(path=path, dest=dest)
||||
def init_openrpc_spec(path: str = "") -> OpenRPCSpec:
    """Parse the specs at *path* and return an :class:`OpenRPCSpec` object.

    Thin wrapper around :func:`openrpc_spec`.
    """
    return openrpc_spec(path=path)
58
_archive/openrpc/factory.py
Normal file
58
_archive/openrpc/factory.py
Normal file
@ -0,0 +1,58 @@
|
||||
import json
|
||||
import os
|
||||
|
||||
import yaml # type: ignore
|
||||
|
||||
from heroserver.openrpc.model.openrpc_spec import (
|
||||
OpenRPCSpec,
|
||||
)
|
||||
from heroserver.openrpc.parser.parser import parser
|
||||
|
||||
|
||||
def openrpc_spec_write(path: str = "", dest: str = "") -> str:
    """
    Parse the specs and write them to *dest* in both JSON and YAML form.

    :param path: source directory of the specs.
    :param dest: output directory (created if missing; ``~`` is expanded).
    :return: the JSON file path, ``<dest>/openrpc_spec.json``.
    """
    data = openrpc_dict(path=path)

    out = json.dumps(data, indent=2)

    dest = os.path.expanduser(dest)
    os.makedirs(dest, exist_ok=True)

    filename = os.path.join(dest, "openrpc_spec.json")
    # Write the spec to the file
    with open(filename, "w") as f:
        f.write(out)
    # BUG FIX: the message previously did not interpolate the filename
    # (it printed a literal placeholder instead of the written path).
    print(f"OpenRPC specification (JSON) has been written to: {filename}")

    yaml_filename = os.path.join(dest, "openrpc_spec.yaml")
    with open(yaml_filename, "w") as f:
        yaml.dump(data, f, sort_keys=False)
    print(f"OpenRPC specification (YAML) has been written to: {yaml_filename}")

    return filename
||||
def openrpc_spec(path: str = "") -> OpenRPCSpec:
    """
    return openrpc object starting from spec path
    this is our python representation of OpenRPCSpec

    Parses the specs at *path* into a dict, then loads it into the model.
    """
    data = openrpc_dict(path=path)

    spec_object = OpenRPCSpec.load(data)

    return spec_object
||||
def openrpc_dict(path: str = "") -> dict:
    """
    return openrpc dict starting from spec path

    Delegates directly to the parser.
    """
    return parser(path=path)
91
_archive/openrpc/factory_model.py
Normal file
91
_archive/openrpc/factory_model.py
Normal file
@ -0,0 +1,91 @@
|
||||
import os
|
||||
from pathlib import Path
|
||||
from typing import Dict, Optional
|
||||
|
||||
from heroserver.openrpc.factory import openrpc_dict, openrpc_spec, openrpc_spec_write
|
||||
from heroserver.openrpc.model.openrpc_spec import OpenRPCSpec
|
||||
|
||||
|
||||
class OpenRPCFactory:
    """Registry of :class:`OpenRPCActor` instances discovered under a spec directory."""

    def __init__(self, generation_path: str, spec_path: str):
        """
        Initialize the OpenRPCFactory with a generation path and a spec path.

        :param generation_path: The path where the generation will occur.
        :param spec_path: The path to the OpenRPC specification (in vlang format).
        """
        # FIX: removed the redundant function-local `import os.path`;
        # `os` is already imported at module level.
        self.actors: Dict[str, "OpenRPCActor"] = {}
        self.generation_path: str = os.path.expanduser(generation_path)
        self.spec_path: str = os.path.expanduser(spec_path)

    def add_actor(self, actor: "OpenRPCActor") -> None:
        """Register *actor* under its name, replacing any existing entry."""
        self.actors[actor.name] = actor

    def get_actor(self, name: str) -> Optional["OpenRPCActor"]:
        """Return the actor named *name*, or ``None`` if unknown."""
        return self.actors.get(name)

    def remove_actor(self, name: str) -> None:
        """Remove the actor named *name*; no-op if absent."""
        self.actors.pop(name, None)

    def scan(self) -> None:
        """Create and register one actor per sub-directory of ``spec_path``."""
        for subdir in os.listdir(self.spec_path):
            subdir_path = os.path.join(self.spec_path, subdir)
            if os.path.isdir(subdir_path):
                actor = OpenRPCActor(name=subdir, path_ourspec=subdir_path, parent=self)
                self.add_actor(actor)
||||
class OpenRPCActor:
    """One actor (a spec sub-directory) together with its parsed OpenRPC spec."""

    def __init__(self, name: str, path_ourspec: str, parent: "OpenRPCFactory"):
        self.name: str = name
        self.path_ourspec: str = path_ourspec  # the directory where we parse & generate
        self.path_openrpc: str = os.path.join(parent.generation_path, self.name)  # the file which represents openrpc spec
        self.parent = parent

        self.openrpc_spec: OpenRPCSpec = openrpc_spec(path=path_ourspec)

    def openrpc_dict(self) -> dict:
        """Return this actor's OpenRPC spec as a plain dict."""
        return openrpc_dict(path=self.path_ourspec)

    def openrpc_spec_write(self) -> str:
        """Write the spec files and return the JSON file path.

        BUG FIX: the return annotation was ``dict``, but the underlying
        factory function returns the JSON filename (a ``str``).
        """
        return openrpc_spec_write(path=self.path_ourspec, dest=self.path_openrpc)

    def openrpc_spec_yaml_path(self) -> str:
        """Path of the YAML spec file, writing the spec first if missing."""
        yaml_path = os.path.join(self.path_openrpc, "openrpc_spec.yaml")
        if not os.path.exists(yaml_path):
            self.openrpc_spec_write()
        return yaml_path

    def openrpc_spec_json_path(self) -> str:
        """Path of the JSON spec file, writing the spec first if missing."""
        json_path = os.path.join(self.path_openrpc, "openrpc_spec.json")
        if not os.path.exists(json_path):
            self.openrpc_spec_write()
        return json_path

    def generate_rest_server(self):
        """Generate a Python REST server for this actor's spec."""
        # Local import — NOTE(review): presumably avoids a heavy or
        # circular import at module load time; confirm.
        from heroserver.openrpc.generator.rest_server.python.rest_server_generator import RestServerGenerator

        rest_server_generator = RestServerGenerator(self.openrpc_spec, Path(self.path_openrpc))
        rest_server_generator.generate()
||||
def new(generation_path: str, spec_path: str) -> OpenRPCFactory:
    """
    Create a new OpenRPCFactory and return OpenRPCActors, starting from a path.

    :param generation_path: The path where the generation will occur.
    :param spec_path: The path to the OpenRPC specification.
    :return: An instance of OpenRPCFactory with actors initialized.
    """
    # Build the factory, then populate it from the spec directory.
    openrpc_factory = OpenRPCFactory(generation_path=generation_path, spec_path=spec_path)
    openrpc_factory.scan()
    return openrpc_factory
||||
|
||||
|
||||
# Usage example:
|
||||
# spec = OpenRPCSpec(...) # Create an OpenRPCSpec instance
|
||||
# actor = OpenRPCActor("MyActor", "/path/to/actor", spec, "/path/to/openrpc.json")
|
||||
# actors = OpenRPCActors()
|
||||
# actors.add_actor(actor)
|
0
_archive/openrpc/generator/__init__.py
Normal file
0
_archive/openrpc/generator/__init__.py
Normal file
77
_archive/openrpc/generator/client/generator.py
Normal file
77
_archive/openrpc/generator/client/generator.py
Normal file
@ -0,0 +1,77 @@
|
||||
from typing import Dict, List, Optional, Union
|
||||
from urllib.parse import urlparse
|
||||
|
||||
from heroserver.openrpc.generator.code.lang_code_generator import LangCodeGenerator
|
||||
from heroserver.openrpc.generator.model.model_generator import ModelGenerator
|
||||
|
||||
from heroserver.openrpc.model.common import (
|
||||
ContentDescriptorObject,
|
||||
ReferenceObject,
|
||||
)
|
||||
from heroserver.openrpc.model.openrpc_spec import OpenRPCSpec
|
||||
|
||||
|
||||
class ClientGenerator:
    """Generate a JSON-RPC client (models + methods) for a target language."""

    def __init__(
        self,
        spec: OpenRPCSpec,
        lang_code_generator: LangCodeGenerator,
        output_file: str,
    ) -> None:
        """
        :param spec: the parsed OpenRPC spec.
        :param lang_code_generator: language backend used for rendering.
        :param output_file: path the generated client source is written to.
        """
        self.spec = spec
        self.model_generator = ModelGenerator(spec, lang_code_generator)
        self.lang_code_generator = lang_code_generator
        self.output_file = output_file

    def generate_client(self):
        """Render imports, models and methods, then write them to ``output_file``."""
        code_pre = self.lang_code_generator.generate_imports()
        code_models = self.model_generator.generate_models()
        code_methods = self.generate_methods()

        # Write the generated code to a file
        with open(self.output_file, "w") as file:
            file.write(code_pre)
            file.write("\n")
            file.write(code_models)
            file.write("\n")
            file.write(code_methods)

        print(f"Generated API code has been written to {self.output_file}")

    def generate_methods(self):
        """Generate one client method per spec method and join them."""
        servers = self.spec.servers
        base_url = "http://localhost:8000"  # fallback when no servers are declared
        if servers:
            base_url = servers[0].url

        url = urlparse(base_url)
        methods = []
        for method_spec in self.spec.methods:
            params: Dict[str, str] = {}
            for param in method_spec.params:
                params[param.name] = self.model_generator.jsonschema_to_type(
                    ["methods", method_spec.name, "params", param.name],
                    param.schema,
                )

            return_type = self.method_result_return_type(["methods", method_spec.name, "result"], method_spec.result)
            methods.append(self.lang_code_generator.generate_method(method_spec, url, params, return_type))

        return "\n\n".join(methods)

    def method_result_return_type(
        self,
        path: List[str],
        method_result: Optional[Union[ContentDescriptorObject, ReferenceObject]],
    ) -> str:
        """Map a method's result descriptor to a language type name.

        BUG FIX: the original left ``type_name`` unbound when
        ``method_result`` was truthy but of an unexpected type
        (UnboundLocalError), and still ran the isinstance checks when it
        was ``None``. An empty string means "no/unknown result type".
        """
        if not method_result:
            return ""

        if isinstance(method_result, ContentDescriptorObject):
            return self.model_generator.jsonschema_to_type(path, method_result.schema)

        if isinstance(method_result, ReferenceObject):
            return self.model_generator.jsonschema_to_type(path, method_result)

        return ""
177
_archive/openrpc/generator/code/golang/golang_code_generator.py
Normal file
177
_archive/openrpc/generator/code/golang/golang_code_generator.py
Normal file
@ -0,0 +1,177 @@
|
||||
import json
|
||||
import os
|
||||
from typing import Any, Dict, List
|
||||
from urllib.parse import ParseResult
|
||||
|
||||
import inflect
|
||||
from jinja2 import Environment, FileSystemLoader
|
||||
from heroserver.openrpc.generator.lang_code_generator import LangCodeGenerator, PropertyInfo
|
||||
|
||||
from heroserver.openrpc.model.common import (
|
||||
ReferenceObject,
|
||||
SchemaObject,
|
||||
)
|
||||
from heroserver.openrpc.model.methods import MethodObject
|
||||
from heroserver.openrpc.model.openrpc_spec import (
|
||||
OpenRPCSpec,
|
||||
)
|
||||
|
||||
script_dir = os.path.dirname(os.path.abspath(__file__))
|
||||
env = Environment(loader=FileSystemLoader(script_dir))
|
||||
inflector = inflect.engine()
|
||||
|
||||
|
||||
class GolangCodeGenerator(LangCodeGenerator):
    """Go backend for the client generator.

    Renders Go structs and JSON-RPC client functions via the Jinja
    templates in ``./templates`` (struct.jinja, methods.jinja, pre.jinja).
    """

    def __init__(self) -> None:
        # Templates are resolved relative to this file's directory (module-level `env`).
        self.struct_template = env.get_template("templates/struct.jinja")
        self.methods_template = env.get_template("templates/methods.jinja")
        self.pre_template = env.get_template("templates/pre.jinja")

    def generate_imports(self) -> str:
        """Render the package clause plus the fixed import set every client needs."""
        return self.pre_template.render(
            package_name="rpcclient",
            imports=[
                "net/http",
                "github.com/mitchellh/mapstructure",
                "encoding/json",
                "bytes",
                "fmt",
                "io",
            ],
        )

    def generate_object(
        self,
        type_name: str,
        properties: Dict[str, PropertyInfo],
    ):
        """Render one Go struct definition for *type_name*."""
        return self.struct_template.render(generator=self, type_name=type_name, properties=properties)

    def generate_method(
        self,
        method_spec: MethodObject,
        url: ParseResult,
        params: Dict[str, str],
        return_type: str,
    ) -> str:
        """Render one Go client function for *method_spec*.

        :param url: server base URL the generated function posts to.
        :param params: parameter name -> Go type.
        :param return_type: Go type of the result ("nil" for none).
        """
        function_name = self.get_camel_case_name(method_spec.name)
        method_name = method_spec.name
        method_result = self.type_to_method_result(return_type)
        method_description = ""
        if method_spec.description:
            # Single quotes are stripped so they cannot break the rendered comment.
            method_description = method_spec.description.replace("'", " ")

        method_example = ""
        if method_spec.examples and len(method_spec.examples) > 0:
            # Only the first example is embedded in the function's doc comment.
            method_example = json.dumps(method_spec.examples[0], indent=4)

        method_code = self.methods_template.render(
            generator=self,
            url=url.geturl(),
            function_name=function_name,
            method_name=method_name,
            method_params=params,
            method_result=method_result,
            return_type=return_type,
            method_description=method_description,
            method_example=method_example,
        )

        return method_code

    # --- primitive type mappings (JSON schema -> Go) ---

    def string_primitive(self) -> str:
        return "string"

    def integer_primitive(self) -> str:
        return "int64"

    def number_primitive(self) -> str:
        return "float64"

    def null_primitive(self) -> str:
        return "nil"

    def bool_primitive(self) -> str:
        return "bool"

    def array_of_type(self, type_name: str) -> str:
        return f"[]{type_name}"

    def generate_multitype(self, types: List[str]) -> str:
        # Only `T | null` unions are supported; they map to a Go pointer type.
        if len(types) > 2:
            raise Exception("only a type and null are supported with anyOf/allOf keyword")

        if len(types) == 1:
            return types[0]

        if types[0] == "nil":
            return f"*{types[1]}"
        if types[1] == "nil":
            return f"*{types[0]}"

        raise Exception("only a type and null are supported with anyOf/allOf keyword")

    def encapsulate_types(self, path: List[str], types: List[SchemaObject | ReferenceObject]) -> str:
        # allOf composition is intentionally unsupported for Go.
        raise Exception("no support for allOf keyword")

    def generate_enum(self, enum: List[Any], type_name: str) -> str:
        # Enums degrade to their underlying primitive; mixed-type enums are rejected.
        if all(isinstance(elem, str) for elem in enum):
            return self.string_primitive()

        elif all(isinstance(elem, int) for elem in enum):
            return self.integer_primitive()

        else:
            raise Exception(f"failed to generate enum code for: {enum}")

    def type_to_method_result(self, type_name: str) -> str:
        # Go signature: `error` alone, or `(T, error)` when there is a value.
        method_result = "error"
        if len(type_name) > 0 and type_name != "nil":
            method_result = f"({type_name}, error)"

        return method_result

    def is_primitive(self, type: str) -> bool:
        # NOTE(review): parameter name shadows the builtin `type`; kept for API compatibility.
        return type in ["int64", "float64", "int", "bool", "string"]

    def is_array(self, type: str) -> bool:
        return type.startswith("[]")

    def get_method_params(self, method_params: Dict[str, str]) -> str:
        # Go parameter list syntax: "name type, name type".
        return ", ".join([f"{param_name} {param_type}" for param_name, param_type in method_params.items()])

    def get_camel_case_name(self, method_name: str) -> str:
        # snake_case -> CamelCase (exported Go identifier).
        return "".join([item.title() for item in method_name.split("_")])

    def get_default_return_with_error(self, return_type: str, error_statement: str) -> str:
        """Build the `return <zero-value>, <err>` expression for *return_type*."""
        if return_type == "nil":
            return error_statement

        if return_type == "string":
            return f'"", {error_statement}'

        if return_type == "bool":
            return f"false, {error_statement}"

        if return_type == "float64" or return_type == "int64":
            return f"0, {error_statement}"

        # Composite types: zero value is the empty literal `T{}`.
        return f"{return_type}{{}}, {error_statement}"
||||
# main()
if __name__ == "__main__":
    # Ad-hoc manual run: generate a Go client from a local example spec.
    from heroserver.openrpc.generator.generator import ClientGenerator
    from heroserver.openrpc.parser.parser import parser

    # NOTE(review): hard-coded developer path — adjust before running.
    data = parser(path="/root/code/git.threefold.info/projectmycelium/hero_server/generatorexamples/example1/specs/storymanager")

    spec_object = OpenRPCSpec.load(data)
    golang_code_generator = GolangCodeGenerator()
    generator = ClientGenerator(
        spec_object,
        golang_code_generator,
        "/tmp/go_client_new.go",
    )

    generator.generate_client()
@ -0,0 +1,92 @@
|
||||
{#- Renders one Go JSON-RPC client function: builds the request payload,
    posts it, and decodes the "result" field into the return type. -#}
{% if method_example -%}
/*
Example:
{{ method_example }}
*/
{% endif -%}

{% if method_description -%}
/*
{{ method_description }}
*/
{% endif -%}
func {{ function_name }}({{ generator.get_method_params(method_params) }}) {{ method_result }} {
    params := map[string]interface{}{}
    {%- for param_name, param_type in method_params.items() %}
    params["{{param_name}}"] = {{param_name}}
    {%- endfor %}

    payload := map[string]interface{}{}
    payload["jsonrpc"] = "2.0"
    payload["id"] = 0
    payload["method"] = "{{ method_name }}"
    payload["params"] = params

    payloadBytes, err := json.Marshal(payload)
    if err != nil{
        return {{generator.get_default_return_with_error(return_type, 'err')}}
    }

    resp, err := http.Post("{{url}}", "application/json", bytes.NewBuffer(payloadBytes))
    if err != nil{
        return {{generator.get_default_return_with_error(return_type, 'fmt.Errorf("failed to make post request: %w", err)')}}
    }

    if resp.StatusCode >= 400{
        return {{generator.get_default_return_with_error(return_type, 'fmt.Errorf("request failed with status %d: %s", resp.StatusCode, resp.Status)')}}
    }

    {%- if return_type != 'nil' %}
    defer resp.Body.Close()
    body, err := io.ReadAll(resp.Body)
    if err != nil{
        return {{generator.get_default_return_with_error(return_type, 'fmt.Errorf("failed to read response body: %w", err)')}}
    }

    mp := map[string]interface{}{}
    if err := json.Unmarshal(body, &mp); err != nil{
        return {{generator.get_default_return_with_error(return_type, 'fmt.Errorf("failed to decode response body: %w", err)')}}
    }

    result, ok := mp["result"]
    if !ok {
        return {{generator.get_default_return_with_error(return_type, 'fmt.Errorf("invalid jsonrpc result: %v", mp)')}}
    }

    if result == nil {
        {#- BUG FIX: was `return_type == 'nil '` (trailing space), so this
            branch could never fire. -#}
        {%- if return_type == 'nil' %}
        return {{generator.get_default_return_with_error(return_type, 'nil')}}
        {%- else %}
        {#- BUG FIX: Jinja does not interpolate {{ }} inside a string
            literal; the type name is concatenated with `~` instead. -#}
        return {{generator.get_default_return_with_error(return_type, 'fmt.Errorf("invalid jsonrpc result: ' ~ return_type ~ ' was expected but found nil")')}}
        {%- endif %}
    }

    {%- if generator.is_primitive(return_type) %}
    return result.({{return_type}}), nil
    {%- elif generator.is_array(return_type) %}
    resSlice := {{return_type}}{}
    {#- BUG FIX: `for item := range slice` iterates indices in Go — use
        `for _, item := range`; also fixed the `intreface` typo. -#}
    for _, item := range result.([]interface{}) {
        {%- if generator.is_primitive(return_type[2:]) %}
        resSlice = append(resSlice, item.({{return_type[2:]}}))
        {%- else %}
        tmp := {{return_type[2:]}}{}
        if err := mapstructure.Decode(item, &tmp); err != nil{
            return {{generator.get_default_return_with_error(return_type, 'fmt.Errorf("failed to decode result: %w", err)')}}
        }

        resSlice = append(resSlice, tmp)
        {%- endif %}
    }
    return resSlice, nil
    {%- else %}
    ret := {{return_type}}{}
    if err := mapstructure.Decode(result, &ret); err != nil{
        return {{generator.get_default_return_with_error(return_type, 'fmt.Errorf("failed to decode result: %w", err)')}}
    }

    return ret, nil
    {%- endif %}
    {%- else %}
    return nil
    {%- endif %}
}
@ -0,0 +1,5 @@
|
||||
{#- Renders the Go package clause followed by one import line per entry. -#}
package {{package_name}}
{% for item in imports %}
import "{{item}}"
{%- endfor %}

@ -0,0 +1,8 @@
|
||||
type {{type_name}} struct{
|
||||
{%- for property_name, property_info in properties.items() %}
|
||||
{%- if property_info.description %}
|
||||
// {{ property_info.description }}
|
||||
{%- endif %}
|
||||
{{ generator.get_camel_case_name(property_name) }} {{ property_info.type_name }} `json:"{{property_name}}"`
|
||||
{%- endfor%}
|
||||
}
|
97
_archive/openrpc/generator/code/lang_code_generator.py
Normal file
97
_archive/openrpc/generator/code/lang_code_generator.py
Normal file
@ -0,0 +1,97 @@
|
||||
from abc import ABC, abstractmethod
|
||||
from typing import Any, Dict, List, Optional, Union
|
||||
from urllib.parse import ParseResult
|
||||
|
||||
from heroserver.openrpc.model.common import (
|
||||
ReferenceObject,
|
||||
SchemaObject,
|
||||
)
|
||||
from heroserver.openrpc.model.methods import MethodObject
|
||||
|
||||
|
||||
class PropertyInfo:
    """Metadata for a single schema property consumed by the code generators."""

    def __init__(self, name: str, type_name: str, description: Optional[str] = None, example: Optional[str] = None) -> None:
        """Record the property's name, target-language type name, and optional
        description/example used when emitting documentation."""
        self.name = name
        self.type_name = type_name
        self.description = description
        self.example = example
|
||||
|
||||
|
||||
class LangCodeGenerator(ABC):
    """Abstract interface every target-language generator implements.

    A concrete subclass renders language-specific source text for the
    objects, methods, enums and primitive types found in an OpenRPC spec.
    """

    @abstractmethod
    def generate_imports(self) -> str:
        """Return the import/preamble section of a generated client file."""
        pass

    @abstractmethod
    def generate_object(
        self,
        type_name: str,
        properties: Dict[str, PropertyInfo],
    ):
        """Render a composite type (class/struct) named *type_name* with *properties*."""
        pass

    @abstractmethod
    def generate_method(
        self,
        method_spec: MethodObject,
        url: ParseResult,
        params: Dict[str, str],
        return_type: str,
    ) -> str:
        """Render one client function for *method_spec* targeting *url*.

        *params* maps parameter names to already-resolved type names;
        *return_type* is the resolved result type name.
        """
        pass

    @abstractmethod
    def string_primitive(self) -> str:
        """Name of the target language's string primitive."""
        pass

    @abstractmethod
    def integer_primitive(self) -> str:
        """Name of the target language's integer primitive."""
        pass

    @abstractmethod
    def number_primitive(self) -> str:
        """Name of the target language's floating-point primitive."""
        pass

    @abstractmethod
    def null_primitive(self) -> str:
        """Name of the target language's null/none value."""
        pass

    @abstractmethod
    def bool_primitive(self) -> str:
        """Name of the target language's boolean primitive."""
        pass

    @abstractmethod
    def is_primitive(self, type_name: str) -> bool:
        """Return True when *type_name* counts as primitive in the target language."""
        pass

    @abstractmethod
    def generate_multitype(self, path: List[str], types: List[Union[SchemaObject, ReferenceObject]]) -> str:
        """handles `anyOf` and `oneOf` in a json schema"""
        pass

    @abstractmethod
    def array_of_type(self, type_name: str) -> str:
        """Return the language's array/list type for elements of *type_name*."""
        pass

    @abstractmethod
    def encapsulate_types(self, path: List[str], types: List[Union[SchemaObject, ReferenceObject]]) -> str:
        """handles `allOf` in a json schema"""
        pass

    @abstractmethod
    def generate_enum(self, enum: List[Any], type_name: str) -> str:
        """Render an enum type named *type_name* for the given member values."""
        pass

    @abstractmethod
    def type_to_method_result(self, type_name: str) -> str:
        """
        convert type to method result
        - type_name can be empty
        """
        pass
|
205
_archive/openrpc/generator/code/python/python_code_generator.py
Normal file
205
_archive/openrpc/generator/code/python/python_code_generator.py
Normal file
@ -0,0 +1,205 @@
|
||||
import json
|
||||
import os
|
||||
from typing import Any, Dict, List
|
||||
from urllib.parse import ParseResult
|
||||
|
||||
import inflect
|
||||
from jinja2 import Environment, FileSystemLoader
|
||||
|
||||
from heroserver.openrpc.generator.code.lang_code_generator import LangCodeGenerator, PropertyInfo
|
||||
from heroserver.openrpc.model.common import (
|
||||
ReferenceObject,
|
||||
SchemaObject,
|
||||
)
|
||||
from heroserver.openrpc.model.methods import MethodObject
|
||||
from heroserver.openrpc.model.openrpc_spec import (
|
||||
OpenRPCSpec,
|
||||
)
|
||||
|
||||
script_dir = os.path.dirname(os.path.abspath(__file__))
|
||||
env = Environment(loader=FileSystemLoader(script_dir))
|
||||
inflector = inflect.engine()
|
||||
|
||||
STRING_PRIMITIVE = "str"
|
||||
INT_PRIMITIVE = "int"
|
||||
FLOAT_PRIMITIVE = "float"
|
||||
BOOL_PRMITIVE = "bool"
|
||||
NONE_PRIMITIVE = "None"
|
||||
|
||||
|
||||
class PythonCodeGenerator(LangCodeGenerator):
    """Generates Python client code (pydantic models + requests-based JSON-RPC
    calls) from an OpenRPC spec, using the jinja templates next to this file."""

    def __init__(self) -> None:
        self.class_template = env.get_template("templates/class.jinja")
        self.enum_template = env.get_template("templates/enum.jinja")
        self.method_template = env.get_template("templates/method.jinja")
        self.pre_template = env.get_template("templates/pre.jinja")

    def generate_imports(self) -> str:
        """Return the import preamble for the generated client module."""
        return self.pre_template.render()

    def generate_object(
        self,
        type_name: str,
        properties: Dict[str, PropertyInfo],
    ):
        """Render a pydantic model class named *type_name* with *properties*."""
        return self.class_template.render(python_code_generator=self, class_name=type_name, properties=properties)

    def generate_load_code(self, name: str, type_name: str, data_source: str, load_param: str) -> str:
        """Return a Python expression that loads field *name* of *type_name*
        out of the dict expression *data_source*; *load_param* is the
        expression holding the raw value for non-primitive types."""
        if type_name.startswith("Optional"):
            type_name = type_name.removeprefix("Optional[").removesuffix("]")
            # BUG FIX: the recursive call previously omitted the required
            # `load_param` argument, raising TypeError at generation time.
            inner = self.generate_load_code(name, type_name, data_source, load_param)
            return f'({inner} if "{name}" in {data_source} else None)'

        if type_name.startswith("List"):
            type_name = type_name.removeprefix("List[").removesuffix("]")
            if self.is_primitive(type_name):
                return f'{data_source}.get("{name}")'
            # BUG FIX: the original nested 'item' in single quotes inside a
            # single-quoted f-string — a SyntaxError before Python 3.12.
            item_load = self.generate_load_code(name, type_name, data_source, "item")
            return f'[{item_load} for item in {data_source}.get("{name}", [])]'

        if self.is_primitive(type_name):
            return f'{data_source}.get("{name}")'

        return f"{type_name}.load({load_param})"

    def generate_method(
        self,
        method_spec: MethodObject,
        url: ParseResult,
        params: Dict[str, str],
        return_type: str,
    ) -> str:
        """Render a client function for *method_spec* posting JSON-RPC to *url*."""
        function_name = method_spec.name.lower().replace(".", "_")
        method_name = method_spec.name
        method_result = self.type_to_method_result(return_type)

        method_description = ""
        if method_spec.description:
            # Single quotes would break generated string literals; newlines are
            # re-prefixed so the description stays inside a comment block.
            method_description = method_spec.description.replace("'", " ")
            method_description = method_description.replace("\n", "\n# ")

        method_example = ""
        if method_spec.examples and len(method_spec.examples) > 0:
            method_example = json.dumps(method_spec.examples[0], indent=4)
            # BUG FIX: str.replace returns a new string; the result was
            # previously discarded, leaving the example lines un-commented.
            method_example = method_example.replace("\n", "\n# ")

        method_code = self.method_template.render(
            python_code_generator=self,
            base_url=f"{url.scheme}://{url.netloc}",
            url_path=url.path,
            function_name=function_name,
            method_name=method_name,
            method_params=params,
            method_result=method_result,
            return_type=return_type,
            method_description=method_description,
            method_example=method_example,
        )

        return method_code

    def string_primitive(self) -> str:
        """Python's string primitive name."""
        return STRING_PRIMITIVE

    def integer_primitive(self) -> str:
        """Python's integer primitive name."""
        return INT_PRIMITIVE

    def number_primitive(self) -> str:
        """Python's floating-point primitive name."""
        return FLOAT_PRIMITIVE

    def null_primitive(self) -> str:
        """Python's null value name."""
        return NONE_PRIMITIVE

    def bool_primitive(self) -> str:
        """Python's boolean primitive name."""
        return BOOL_PRMITIVE

    def array_of_type(self, type_name: str) -> str:
        """Typing form for a homogeneous list of *type_name*."""
        return f"List[{type_name}]"

    def generate_multitype(self, types: List[str]) -> str:
        """Map an anyOf/oneOf pair onto Optional[...]; only `type | null` is supported.

        NOTE(review): signature differs from LangCodeGenerator.generate_multitype
        (path, types) — confirm call sites before unifying.
        """
        if len(types) > 2:
            raise Exception("only a type and null are supported with anyOf/allOf keyword")

        if len(types) == 1:
            return types[0]

        if types[0] == NONE_PRIMITIVE:
            return f"Optional[{types[1]}]"
        if types[1] == NONE_PRIMITIVE:
            return f"Optional[{types[0]}]"

        raise Exception("only a type and null are supported with anyOf/allOf keyword")

    def encapsulate_types(self, path: List[str], types: List[SchemaObject | ReferenceObject]) -> str:
        """allOf is not supported by the Python generator."""
        raise Exception("no support for allOf keyword")

    def generate_enum(self, enum: List[Any], type_name: str) -> str:
        """Render an Enum subclass for a homogeneous list of str or int members."""
        if all(isinstance(elem, str) for elem in enum):
            # enum of strings
            return self.enum_template.render(
                enum=enum,
                type_name=type_name,
                number_to_words=inflector.number_to_words,
            )

        elif all(isinstance(elem, int) for elem in enum):
            # enum of integers: member names are spelled-out numbers
            return self.enum_template.render(
                is_integer=True,
                enum=enum,
                type_name=type_name,
                number_to_words=inflector.number_to_words,
            )

        else:
            raise Exception(f"failed to generate enum code for: {enum}")

    def type_to_method_result(self, type_name: str) -> str:
        """Python annotates the return type directly; no wrapping needed."""
        return type_name

    def get_method_params(self, method_params: Dict[str, str]) -> str:
        """Format `name: type` parameter pairs for a generated function signature."""
        return ", ".join(f"{param_name}: {param_type}" for param_name, param_type in method_params.items())

    def is_primitive(self, type_name: str) -> bool:
        """True for builtin scalars and typing containers (List/Optional/Union)."""
        return type_name in [STRING_PRIMITIVE, INT_PRIMITIVE, FLOAT_PRIMITIVE, BOOL_PRMITIVE] or any(
            type_name.startswith(end) for end in ["List", "Optional", "Union"]
        )

    def get_pydantic_field_params(self, prop_info: PropertyInfo) -> str:
        """Build the `Field(...)` expression for one pydantic model attribute."""
        # Optional fields default to None; required fields use Ellipsis.
        if prop_info.type_name.startswith("Optional"):
            field_str = "None"
        else:
            field_str = "..."

        if prop_info.description:
            # NOTE(review): a description containing double quotes would break
            # the generated literal — confirm inputs are sanitized upstream.
            field_str += f', description="{prop_info.description}"'

        if prop_info.example:
            if isinstance(prop_info.example, str):
                example_formatted = f'"{prop_info.example}"'
            else:
                example_formatted = prop_info.example
            field_str += f", examples=[{example_formatted}]"

        return f"Field({field_str})"
|
||||
|
||||
|
||||
# main()
|
||||
if __name__ == "__main__":
|
||||
import yaml
|
||||
|
||||
from heroserver.openrpc.generator.generator import ClientGenerator
|
||||
|
||||
with open("/root/code/git.threefold.info/projectmycelium/hero_server/generatorexamples/mycelium_openrpc.yaml", "r") as file:
|
||||
data = yaml.safe_load(file)
|
||||
# print(data)
|
||||
spec_object = OpenRPCSpec.load(data)
|
||||
python_code_generator = PythonCodeGenerator()
|
||||
generator = ClientGenerator(
|
||||
spec_object,
|
||||
python_code_generator,
|
||||
"/tmp/python_client.py",
|
||||
)
|
||||
|
||||
generator.generate_client()
|
@ -0,0 +1,4 @@
|
||||
class {{ class_name }}(BaseModel):
|
||||
{% for prop_name, prop_info in properties.items() -%}
|
||||
{{ prop_name }}: {{prop_info.type_name}} = {{python_code_generator.get_pydantic_field_params(prop_info)}}
|
||||
{% endfor %}
|
18
_archive/openrpc/generator/code/python/templates/enum.jinja
Normal file
18
_archive/openrpc/generator/code/python/templates/enum.jinja
Normal file
@ -0,0 +1,18 @@
|
||||
{% if is_integer %}
|
||||
class {{ type_name }}(Enum):
|
||||
{% for elem in enum -%}
|
||||
{{ number_to_words(elem) }} = {{ elem }}
|
||||
{% endfor %}
|
||||
{% else -%}
|
||||
class {{ type_name }}(str, Enum):
|
||||
{% for elem in enum -%}
|
||||
{{ elem.upper() }} = '{{ elem }}'
|
||||
{% endfor %}
|
||||
{% endif %}
|
||||
{# @classmethod
|
||||
def load(cls, data: Dict[str, Any]) -> "{{type_name}}":
|
||||
return cls(
|
||||
{% for elem in enum -%}
|
||||
{{elem}} = data.get('{{elem}}'),
|
||||
{% endfor %}
|
||||
) #}
|
@ -0,0 +1,30 @@
|
||||
{# Renders one Python client function that performs a JSON-RPC 2.0 call. #}
{% if method_example != "" -%}
# Example:
# {{ method_example }}
{% endif -%}
def {{ function_name }}({{ python_code_generator.get_method_params(method_params) }}){% if method_result %} -> {{ method_result }}{% endif %}:
    {% if method_description != "" -%}
    """
    {{ method_description }}
    """
    {% endif -%}
    url = "{{base_url}}"
    headers = {"content-type": "application/json"}

    params = {
        {% for param_name, param_type in method_params.items() -%}
        '{{ param_name }}': {{ param_name }},
        {% endfor -%}
    }

    {# BUG FIX: raise_for_status() must be called on the Response object.
       Previously .json() was applied first, so the no-result branch called
       raise_for_status() on a dict and failed with AttributeError. #}
    response = requests.post(url, json={"jsonrpc": "2.0", "id": 0, 'method': '{{ method_name }}', 'params': params}, headers=headers)
    response.raise_for_status()
    result = response.json()

    {% if return_type -%}
    {% if python_code_generator.is_primitive(return_type) -%}
    return result['result']
    {% else -%}
    return {{return_type}}(result['result'])
    {% endif -%}
    {% endif -%}
|
@ -0,0 +1,5 @@
|
||||
from typing import List, Optional, Union, Any, Dict
|
||||
from pydantic import BaseModel, Field
|
||||
from enum import Enum
|
||||
import requests
|
||||
|
205
_archive/openrpc/generator/code/vlang/handler_generator.py
Normal file
205
_archive/openrpc/generator/code/vlang/handler_generator.py
Normal file
@ -0,0 +1,205 @@
|
||||
import os
|
||||
from pathlib import Path
|
||||
from typing import Dict, Union
|
||||
|
||||
from jinja2 import Environment, FileSystemLoader
|
||||
|
||||
from heroserver.openrpc.generator.model_generator import ModelGenerator
|
||||
from heroserver.openrpc.generator.vlang.vlang_code_generator import VlangGenerator
|
||||
from heroserver.openrpc.model.common import ContentDescriptorObject, ReferenceObject
|
||||
from heroserver.openrpc.model.methods import MethodObject
|
||||
from heroserver.openrpc.model.openrpc_spec import OpenRPCSpec
|
||||
|
||||
script_dir = os.path.dirname(os.path.abspath(__file__))
|
||||
env = Environment(loader=FileSystemLoader(script_dir))
|
||||
|
||||
|
||||
def get_actor_executor_name(actor: str) -> str:
    """Turn a snake_case actor name into its PascalCase executor type name."""
    parts = actor.split("_")
    pascal = "".join(part.title() for part in parts)
    return pascal + "Executor"
|
||||
|
||||
|
||||
class ActorGenerator:
    """Generates the V source files (models, CRUD helpers, actor method stubs
    and the executor) for a single actor from its OpenRPC spec."""

    def __init__(self, actor: str, spec: OpenRPCSpec, dir: Path) -> None:
        self.spec = spec
        self.actor = actor
        # Output directory that receives the generated .v files.
        self.dir = dir
        self.model_generator = ModelGenerator(spec, VlangGenerator())
        self.executor_template = env.get_template("templates/executor.jinja")
        self.pre_template = env.get_template("templates/pre.jinja")
        self.internal_crud_methods_template = env.get_template("templates/internal_crud_methods.jinja")
        self.internal_actor_method_template = env.get_template("templates/internal_actor_method.jinja")

    def generate(self):
        """Generate all output files for this actor."""
        self.generate_models()
        self.generate_crud()
        self.generate_internal_actor_methods()
        self.generate_executor()

    def generate_models(self):
        """Write the actor's model structs to <actor>_models.v."""
        pre = self.pre_template.render(module_name="myhandler", imports=[])
        code = self.model_generator.generate_models()
        path = self.dir.joinpath(f"{self.actor}_models.v")

        with open(path, "w") as file:
            file.write(f"{pre}\n\n{code}\n")

    def generate_crud(self):
        """Write get/set/delete helpers for every root object to <actor>_crud.v."""
        imports = self.pre_template.render(
            module_name="myhandler",
            imports=["json", "freeflowuniverse.crystallib.baobab.backend"],
        )
        methods = ""
        for path_str in self.model_generator.spec.get_root_objects().keys():
            object = self.model_generator.processed_objects[path_str]
            # Skip objects that produced no generated code.
            if object["code"] == "":
                continue

            type_name = object["name"]
            variable_name = type_name.lower()
            methods += (
                self.internal_crud_methods_template.render(
                    variable_name=variable_name,
                    type_name=type_name,
                    actor_executor_name=get_actor_executor_name(self.actor),
                )
                + "\n\n"
            )

        path = self.dir.joinpath(f"{self.actor}_crud.v")
        with open(path, "w") as file:
            file.write(f"{imports}\n\n{methods}")

    def generate_internal_actor_methods(self):
        """Write a stub <actor>_<method>_internal.v file per RPC method.

        Existing files are left untouched so hand-written implementations
        survive regeneration; get/set/delete methods are skipped because
        generate_crud covers them.
        """
        pre = self.pre_template.render(module_name="myhandler", imports=[])
        for method in self.spec.methods:
            function_name = method.name.lower().replace(".", "_") + "_internal"
            file_path = self.dir.joinpath(f"{self.actor}_{function_name}.v")
            # Do not overwrite a file the developer may have implemented.
            if file_path.exists():
                continue

            if any(method.name.endswith(end) for end in ["get", "set", "delete"]):
                continue

            # NOTE(review): `params` is built but never passed to the template
            # (only the formatted `method_params` string is used) — confirm
            # whether this dict is dead code.
            params: Dict[str, str] = {}
            for param in method.params:
                params[param.name] = self.model_generator.jsonschema_to_type(["methods", method.name, "params", param.name], param.schema)

            return_type = self.get_method_return_type(method)
            method_params = ", ".join([f"{param.name} {self.get_param_type(method.name, param)}" for param in method.params])

            code = self.internal_actor_method_template.render(
                function_name=function_name,
                method_params=method_params,
                return_type=return_type,
                actor_executor_name=get_actor_executor_name(self.actor),
            )

            with open(file_path, "w") as file:
                file.write(f"{pre}\n\n{code}")

    def generate_executor(self):
        """Write the dispatching executor struct to <actor>_executor.v."""
        pre = self.pre_template.render(
            module_name="myhandler",
            imports=[
                "x.json2",
                "json",
                "freeflowuniverse.crystallib.clients.redisclient",
                "freeflowuniverse.crystallib.baobab.backend",
                "freeflowuniverse.crystallib.rpc.jsonrpc",
            ],
        )

        code = self.executor_template.render(
            generator=self,
            actor_executor_name=get_actor_executor_name(self.actor),
            methods=self.spec.methods,
        )

        path = self.dir.joinpath(f"{self.actor}_executor.v")
        with open(path, "w") as file:
            file.write(f"{pre}\n\n{code}")

    def get_param_type(
        self,
        method_name: str,
        param: Union[ContentDescriptorObject, ReferenceObject],
    ) -> str:
        """Resolve a method parameter's schema to a V type name."""
        type_name = self.model_generator.jsonschema_to_type(["methods", method_name, "params", param.name], param.schema)
        return type_name

    def get_method_return_type(self, method: MethodObject) -> str:
        """Resolve the method's result schema to a V type name ('' when absent)."""
        if not method.result:
            return ""

        path = ["methods", method.name, "result"]
        schema = method.result
        # A ContentDescriptorObject wraps the actual schema.
        if isinstance(method.result, ContentDescriptorObject):
            schema = method.result.schema

        return self.model_generator.jsonschema_to_type(path, schema)

    def is_primitive(self, type_name: str) -> bool:
        """Delegate the primitive check to the target-language generator."""
        return self.model_generator.lang_code_generator.is_primitive(type_name)

    def get_method_params_as_args(self, method: MethodObject) -> str:
        """Comma-separated argument names for calling the *_internal function."""
        return ", ".join([param.name for param in method.params])
|
||||
|
||||
|
||||
class Generator:
    """Generates the V handler module (per-actor executors plus the dispatching
    Handler) from a directory of OpenRPC specs — one sub-directory per actor."""

    def generate_handler(self, specs_dir: Path, output_dir: Path):
        """Render handler.v, handler_test.v and all per-actor files into *output_dir*.

        Each sub-directory of *specs_dir* is treated as one actor whose spec is
        parsed and fed through ActorGenerator.
        """
        # BUG FIX: `parser` was only imported inside the __main__ guard, so
        # calling this method from another module raised NameError. Import it
        # locally to keep module import side effects unchanged.
        from heroserver.openrpc.parser.parser import parser

        output_dir.mkdir(parents=True, exist_ok=True)

        handler_template = env.get_template("templates/handler.jinja")
        handler_test_template = env.get_template("templates/handler_test.jinja")
        pre_template = env.get_template("templates/pre.jinja")
        actors = []
        method_names = []

        pre = pre_template.render(
            module_name="myhandler",
            imports=[
                "freeflowuniverse.crystallib.clients.redisclient",
                "freeflowuniverse.crystallib.baobab.backend",
                "freeflowuniverse.crystallib.rpc.jsonrpc",
            ],
        )
        for item in specs_dir.iterdir():
            if not item.is_dir():
                continue

            actors.append(item.name)

            data = parser(path=item.as_posix())
            openrpc_spec = OpenRPCSpec.load(data)
            actor_generator = ActorGenerator(item.name, openrpc_spec, output_dir)
            actor_generator.generate()

            for method in openrpc_spec.methods:
                method_names.append(f"{item.name}.{method.name}")

        # Render the dispatching Handler over every discovered actor.
        code = handler_template.render(actors=actors, get_actor_executor_name=get_actor_executor_name)

        handler_path = output_dir.joinpath("handler.v")
        with open(handler_path, "w") as file:
            file.write(f"{pre}\n\n{code}")

        handler_test_path = output_dir.joinpath("handler_test.v")
        with open(handler_test_path, "w") as file:
            file.write(handler_test_template.render(method_names=method_names))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
from heroserver.openrpc.parser.parser import parser
|
||||
|
||||
generator = Generator()
|
||||
path = "~/code/git.threefold.info/projectmycelium/hero_server/generatorexamples/example1/specs"
|
||||
generator.generate_handler(Path(path), Path("/tmp/myhandler"))
|
||||
# vlang_code_generator = VlangGenerator()
|
||||
# generator = ClientGenerator(
|
||||
# spec_object,
|
||||
# vlang_code_generator,
|
||||
# "/tmp/v_client_new.v",
|
||||
# )
|
||||
|
||||
# generator.generate_client()
|
@ -0,0 +1,9 @@
|
||||
pub enum {{ type_name }}{
|
||||
{% for elem in enum -%}
|
||||
{% if is_integer -%}
|
||||
{{ number_to_words(elem) }} = {{ elem }}
|
||||
{% else -%}
|
||||
{{ elem }}
|
||||
{% endif -%}
|
||||
{% endfor %}
|
||||
}
|
@ -0,0 +1,77 @@
|
||||
{# Executor for one actor: decodes JSON-RPC params, dispatches to the
   *_internal implementation and pushes the response onto a redis queue.
   BUG FIXES: the "jsonrpc" member must be exactly '2.0' (was '2.0.0'),
   and implementation-defined server errors use the -32000..-32099 range
   (was positive 32000). #}
pub struct {{ actor_executor_name }}{
pub mut:
	db &backend.Backend
	redis &redisclient.Redis
}

pub fn (mut executor {{ actor_executor_name }}) execute(rpc_msg_id string, rpc_msg_method string, rpc_msg_params_str string) {
	raw_params := json2.raw_decode(rpc_msg_params_str) or{
		executor.return_error(rpc_msg_id, jsonrpc.invalid_params)
		return
	}

	params_arr := raw_params.arr()

	match rpc_msg_method {
		{%- for method in methods %}
		'{{method.name}}' {
			{%- for param in method.params %}
			{%- if generator.is_primitive(generator.get_param_type(method.name, param))%}
			{{param.name}} := params_arr[{{loop.index0}}] as {{generator.get_param_type(method.name, param)}}
			{%- else %}
			{{param.name}} := json.decode({{generator.get_param_type(method.name, param)}}, params_arr[{{loop.index0}}].json_str()) or {
				executor.return_error(rpc_msg_id, jsonrpc.invalid_request)
				return
			}
			{%- endif %}
			{%- endfor %}

			{%- if generator.get_method_return_type(method) == 'none' %}
			executor.{{method.name}}_internal({{generator.get_method_params_as_args(method)}}) or {
				executor.return_error(rpc_msg_id, jsonrpc.InnerJsonRpcError{
					code: -32000
					message: '${err}'
				})
				return
			}

			response := jsonrpc.JsonRpcResponse[string]{
				jsonrpc: '2.0'
				id: rpc_msg_id
				result: ''
			}
			{%- else %}
			result := executor.{{method.name}}_internal({{generator.get_method_params_as_args(method)}}) or {
				executor.return_error(rpc_msg_id, jsonrpc.InnerJsonRpcError{
					code: -32000
					message: '${err}'
				})
				return
			}

			response := jsonrpc.JsonRpcResponse[{{generator.get_method_return_type(method)}}]{
				jsonrpc: '2.0'
				id: rpc_msg_id
				result: result
			}
			{%- endif %}

			// put response in response queue
			executor.redis.lpush(rpc_msg_id, response.to_json()) or {
				println('failed to push response for ${rpc_msg_id} to redis queue: ${err}')
			}
		}
		{%- endfor %}
		else {
			executor.return_error(rpc_msg_id, jsonrpc.method_not_found)
			return
		}
	}
}

pub fn (mut executor {{actor_executor_name}}) return_error(rpc_msg_id string, error jsonrpc.InnerJsonRpcError){
	response := jsonrpc.new_jsonrpcerror(rpc_msg_id, error)
	executor.redis.lpush(rpc_msg_id, response.to_json()) or {
		println('failed to push response for ${rpc_msg_id} to redis queue: ${err}')
	}
}
|
@ -0,0 +1,50 @@
|
||||
struct Handler {
|
||||
pub mut:
|
||||
db &backend.Backend
|
||||
redis &redisclient.Redis
|
||||
{% for actor in actors %}
|
||||
{{actor}}_executor {{get_actor_executor_name(actor)}}
|
||||
{%- endfor %}
|
||||
}
|
||||
|
||||
pub fn new(db_config backend.BackendConfig, redis_addr string) !Handler{
|
||||
db := backend.new(db_config)!
|
||||
mut redis_client := redisclient.new([redis_addr])!
|
||||
redis_client.selectdb(0)!
|
||||
|
||||
return Handler{
|
||||
db: &db
|
||||
redis: &redis_client
|
||||
{%- for actor in actors %}
|
||||
{{actor}}_executor: {{get_actor_executor_name(actor)}}{
|
||||
db: &db
|
||||
redis: &redis_client
|
||||
}
|
||||
{%- endfor %}
|
||||
}
|
||||
}
|
||||
|
||||
// handle handles an incoming JSON-RPC encoded message and returns an encoded response
|
||||
pub fn (mut handler Handler) handle(id string, method string, params_str string) {
|
||||
actor := method.all_before('.')
|
||||
method_name := method.all_after('.')
|
||||
|
||||
match actor {
|
||||
{%- for actor in actors %}
|
||||
'{{ actor }}' {
|
||||
spawn (&handler.{{actor}}_executor).execute(id, method_name, params_str)
|
||||
}
|
||||
{%- endfor %}
|
||||
else {
|
||||
handler.return_error(id, jsonrpc.method_not_found)
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn (mut handler Handler) return_error(rpc_msg_id string, error jsonrpc.InnerJsonRpcError){
|
||||
response := jsonrpc.new_jsonrpcerror(rpc_msg_id, error)
|
||||
handler.redis.lpush(rpc_msg_id, response.to_json()) or {
|
||||
println('failed to push response for ${rpc_msg_id} to redis queue: ${err}')
|
||||
}
|
||||
}
|
@ -0,0 +1,31 @@
|
||||
module myhandler
|
||||
|
||||
import x.json2
|
||||
import rand
|
||||
import freeflowuniverse.crystallib.baobab.backend
|
||||
|
||||
fn test_handler(){
|
||||
db_config := backend.BackendConfig{
|
||||
name: 'myhandler'
|
||||
secret: 'secret'
|
||||
reset: true
|
||||
db_type: .postgres
|
||||
}
|
||||
|
||||
mut handler := new(db_config, '127.0.0.1:6379')!
|
||||
{% for method_name in method_names %}
|
||||
do_request(mut handler, '{{method_name}}')!
|
||||
{%- endfor %}
|
||||
}
|
||||
|
||||
fn do_request(mut handler Handler, method_name string) ! {
|
||||
// TODO: edit input parameters
|
||||
mut params := []json2.Any{}
|
||||
params << "objid"
|
||||
params << "blabla_name"
|
||||
params_str := json2.Any(params).json_str()
|
||||
|
||||
id := rand.string(6)
|
||||
handler.handle(rand.string(6), method_name, json2.Any(params).json_str())
|
||||
println('request id: ${id}')
|
||||
}
|
@ -0,0 +1,7 @@
|
||||
pub fn (mut executor {{ actor_executor_name }}) {{function_name}}({{method_params}}) !{{return_type}}{
|
||||
// context allows us to see who the user is and which groups the user is
|
||||
// context also gives a logging feature
|
||||
// context is linked to 1 circle
|
||||
// context is linked to a DB (OSIS)
|
||||
panic('implement')
|
||||
}
|
@ -0,0 +1,28 @@
|
||||
pub fn (mut executor {{ actor_executor_name }}) {{variable_name}}_get_internal(id string) !{{type_name}}{
|
||||
json_str := executor.db.indexer.get_json(id, backend.RootObject{
|
||||
name: '{{type_name}}'
|
||||
})!
|
||||
|
||||
return json.decode({{type_name}}, json_str)!
|
||||
}
|
||||
|
||||
pub fn (mut executor {{ actor_executor_name }}) {{variable_name}}_set_internal({{variable_name}} {{type_name}}) !{
|
||||
if {{variable_name}}.oid != ''{
|
||||
executor.db.indexer.set(backend.RootObject{
|
||||
id: {{variable_name}}.oid
|
||||
name: '{{type_name}}'
|
||||
})!
|
||||
}
|
||||
|
||||
executor.db.indexer.new(backend.RootObject{
|
||||
name: '{{type_name}}'
|
||||
})!
|
||||
}
|
||||
|
||||
pub fn (mut executor {{ actor_executor_name }}) {{variable_name}}_delete_internal(id string) !{
|
||||
executor.db.indexer.delete(id, backend.RootObject{
|
||||
name: '{{type_name}}'
|
||||
})!
|
||||
}
|
||||
|
||||
|
@ -0,0 +1,5 @@
|
||||
pub struct {{method_param_struct_name}}{
|
||||
{% for param_name, param_type in params.items()%}
|
||||
{{param_name}} {{param_type}}
|
||||
{%- endfor %}
|
||||
}
|
@ -0,0 +1,75 @@
|
||||
{# Renders one V client function that performs a JSON-RPC 2.0 call over HTTP. #}
{% if method_example -%}
/*
Example:
{{ method_example }}
*/
{% endif -%}

{% if method_description -%}
/*
{{ method_description }}
*/
{% endif -%}
pub fn {{ function_name }}({{ vlang_code_generator.get_method_params(method_params) }}) {{ method_result }}{
	mut conn := httpconnection.new(
		name: 'openrpc_client'
		url: '{{ base_url }}'
	)!

	mut params := map[string]json2.Any{}
	{% for param_name, param_type in method_params.items() -%}
	{% if vlang_code_generator.is_primitive(param_type) %}
	params["{{ param_name }}"] = {{ param_name }}
	{% elif vlang_code_generator.is_vlang_array(param_type) %}
	mut any_arr := []json2.Any{}
	for item in {{ param_name }}{
		{% if vlang_code_generator.is_primitive(param_type[2:]) %}
		any_arr << item
		{% else %}
		any_arr << json2.raw_decode(json2.encode(item))!
		{% endif %}
	}
	params["{{ param_name }}"] = json2.Any(any_arr)
	{%else %}
	params["{{ param_name }}"] = json2.raw_decode(json2.encode({{ param_name }}))!
	{% endif %}
	{% endfor -%}

	mut payload := map[string]json2.Any{}
	payload['jsonrpc'] = "2.0"
	payload['id'] = 0
	payload['method'] = '{{ method_name }}'
	payload['params'] = params

	response := conn.send(method: .post, data: json2.encode(payload){% if url_path -%}, prefix: '{{ url_path }}' {% endif -%})!
	if !response.is_ok() {
		return error('failed to make rpc request: (${response.code}) ${response.data}')
	}

	{% if return_type != 'none' %}
	mp := json2.raw_decode(response.data)!.as_map()
	res := mp['result'] or {
		return error('invalid jsonrpc result: ${response.data}')
	}

	if res is json2.Null{
		return error('not found')
	}

	{% if vlang_code_generator.is_primitive(return_type) %}
	return res as {{return_type}}
	{% elif vlang_code_generator.is_vlang_array(return_type) %}
	{# BUG FIX: was `mut res_arr := <array type>` with no `{}` initializer —
	   not valid V; also the per-item cast used the array type instead of
	   the element type (return_type[2:] strips the leading `[]`). #}
	mut res_arr := {{return_type}}{}
	for item in res.arr() {
		{% if vlang_code_generator.is_primitive(return_type[2:]) %}
		res_arr << item as {{return_type[2:]}}
		{% else %}
		res_arr << json2.decode[{{return_type[2:]}}](item.json_str())!
		{% endif %}
	}
	return res_arr
	{%else %}
	return json2.decode[{{return_type}}](res.json_str())!
	{% endif -%}
	{% endif %}
}
|
@ -0,0 +1,5 @@
|
||||
module {{module_name}}
|
||||
{% for item in imports %}
|
||||
import {{item}}
|
||||
{%- endfor %}
|
||||
|
10
_archive/openrpc/generator/code/vlang/templates/struct.jinja
Normal file
10
_archive/openrpc/generator/code/vlang/templates/struct.jinja
Normal file
@ -0,0 +1,10 @@
|
||||
@[params]
|
||||
pub struct {{ type_name }}{
|
||||
pub mut:
|
||||
{%- for property_name, property_info in properties.items() %}
|
||||
{%- if property_info.description %}
|
||||
// {{ property_info.description }}
|
||||
{%- endif %}
|
||||
{{ property_name }} {{ property_info.type_name }}
|
||||
{%- endfor %}
|
||||
}
|
164
_archive/openrpc/generator/code/vlang/vlang_code_generator.py
Normal file
164
_archive/openrpc/generator/code/vlang/vlang_code_generator.py
Normal file
@ -0,0 +1,164 @@
|
||||
import json
|
||||
import os
|
||||
from typing import Any, Dict, List
|
||||
from urllib.parse import ParseResult
|
||||
|
||||
import inflect
|
||||
from jinja2 import Environment, FileSystemLoader
|
||||
|
||||
from heroserver.openrpc.generator.lang_code_generator import LangCodeGenerator, PropertyInfo
|
||||
from heroserver.openrpc.model.common import (
|
||||
ReferenceObject,
|
||||
SchemaObject,
|
||||
)
|
||||
from heroserver.openrpc.model.methods import MethodObject
|
||||
from heroserver.openrpc.model.openrpc_spec import (
|
||||
OpenRPCSpec,
|
||||
)
|
||||
|
||||
script_dir = os.path.dirname(os.path.abspath(__file__))
|
||||
env = Environment(loader=FileSystemLoader(script_dir))
|
||||
inflector = inflect.engine()
|
||||
|
||||
|
||||
class VlangGenerator(LangCodeGenerator):
|
||||
def __init__(self) -> None:
|
||||
self.struct_template = env.get_template("templates/struct.jinja")
|
||||
self.enum_template = env.get_template("templates/enum.jinja")
|
||||
self.methods_template = env.get_template("templates/methods.jinja")
|
||||
self.pre_template = env.get_template("templates/pre.jinja")
|
||||
|
||||
def generate_imports(self) -> str:
|
||||
return self.pre_template.render()
|
||||
|
||||
def generate_object(
|
||||
self,
|
||||
type_name: str,
|
||||
properties: Dict[str, PropertyInfo],
|
||||
):
|
||||
return self.struct_template.render(type_name=type_name, properties=properties)
|
||||
|
||||
def generate_method(
|
||||
self,
|
||||
method_spec: MethodObject,
|
||||
url: ParseResult,
|
||||
params: Dict[str, str],
|
||||
return_type: str,
|
||||
) -> str:
|
||||
function_name = method_spec.name.lower().replace(".", "_")
|
||||
method_name = method_spec.name
|
||||
method_result = self.type_to_method_result(return_type)
|
||||
method_description = ""
|
||||
if method_spec.description:
|
||||
method_description = method_spec.description.replace("'", " ")
|
||||
|
||||
method_example = ""
|
||||
if method_spec.examples and len(method_spec.examples) > 0:
|
||||
method_example = json.dumps(method_spec.examples[0], indent=4)
|
||||
|
||||
method_code = self.methods_template.render(
|
||||
vlang_code_generator=self,
|
||||
base_url=f"{url.scheme}://{url.netloc}",
|
||||
url_path=url.path,
|
||||
function_name=function_name,
|
||||
method_name=method_name,
|
||||
method_params=params,
|
||||
method_result=method_result,
|
||||
return_type=return_type,
|
||||
method_description=method_description,
|
||||
method_example=method_example,
|
||||
)
|
||||
|
||||
return method_code
|
||||
|
||||
def string_primitive(self) -> str:
|
||||
return "string"
|
||||
|
||||
def integer_primitive(self) -> str:
|
||||
return "i64"
|
||||
|
||||
def number_primitive(self) -> str:
|
||||
return "f64"
|
||||
|
||||
def null_primitive(self) -> str:
|
||||
return "none"
|
||||
|
||||
def bool_primitive(self) -> str:
|
||||
return "bool"
|
||||
|
||||
def array_of_type(self, type_name: str) -> str:
|
||||
return f"[]{type_name}"
|
||||
|
||||
def generate_multitype(self, types: List[str]) -> str:
|
||||
if len(types) > 2:
|
||||
raise Exception("only a type and null are supported with anyOf/allOf keyword")
|
||||
|
||||
if len(types) == 1:
|
||||
return types[0]
|
||||
|
||||
if types[0] == "none":
|
||||
return f"?{types[1]}"
|
||||
if types[1] == "none":
|
||||
return f"?{types[0]}"
|
||||
|
||||
raise Exception("only a type and null are supported with anyOf/allOf keyword")
|
||||
|
||||
def encapsulate_types(self, path: List[str], types: List[SchemaObject | ReferenceObject]) -> str:
|
||||
raise Exception("no support for allOf keyword")
|
||||
|
||||
def generate_enum(self, enum: List[Any], type_name: str) -> str:
|
||||
if all(isinstance(elem, str) for elem in enum):
|
||||
# enum of strings
|
||||
return self.enum_template.render(
|
||||
enum=enum,
|
||||
type_name=type_name,
|
||||
number_to_words=inflector.number_to_words,
|
||||
)
|
||||
|
||||
elif all(isinstance(elem, int) for elem in enum):
|
||||
# enum of integers
|
||||
return self.enum_template.render(
|
||||
is_integer=True,
|
||||
enum=enum,
|
||||
type_name=type_name,
|
||||
number_to_words=inflector.number_to_words,
|
||||
)
|
||||
|
||||
else:
|
||||
raise Exception(f"failed to generate enum code for: {enum}")
|
||||
|
||||
def type_to_method_result(self, type_name: str) -> str:
|
||||
if type_name == "none":
|
||||
type_name = ""
|
||||
|
||||
if type_name.startswith("?"):
|
||||
type_name = type_name[1:]
|
||||
|
||||
return "!" + type_name
|
||||
|
||||
def is_primitive(self, type: str) -> bool:
|
||||
return type in ["u64", "f64", "i64", "int", "bool", "string"]
|
||||
|
||||
def is_vlang_array(self, type: str) -> bool:
|
||||
return type.startswith("[]")
|
||||
|
||||
def get_method_params(self, method_params: Dict[str, str]) -> str:
|
||||
return ", ".join([f"{param_name} {param_type}" for param_name, param_type in method_params.items()])
|
||||
|
||||
|
||||
# main()
|
||||
if __name__ == "__main__":
|
||||
from heroserver.openrpc.generator.generator import ClientGenerator
|
||||
from heroserver.openrpc.parser.parser import parser
|
||||
|
||||
data = parser(path="~/code/git.threefold.info/projectmycelium/hero_server/lib/openrpclib/parser/examples")
|
||||
|
||||
spec_object = OpenRPCSpec.load(data)
|
||||
vlang_code_generator = VlangGenerator()
|
||||
generator = ClientGenerator(
|
||||
spec_object,
|
||||
vlang_code_generator,
|
||||
"/tmp/v_client_new.v",
|
||||
)
|
||||
|
||||
generator.generate_client()
|
46
_archive/openrpc/generator/hero_generator.py
Normal file
46
_archive/openrpc/generator/hero_generator.py
Normal file
@ -0,0 +1,46 @@
|
||||
import argparse
|
||||
from pathlib import Path
|
||||
|
||||
from heroserver.openrpc.generator.rest_server.python.rest_server_generator import (
|
||||
RestServerGenerator,
|
||||
)
|
||||
from heroserver.openrpc.model.openrpc_spec import OpenRPCSpec
|
||||
from heroserver.openrpc.parser.parser import parser
|
||||
|
||||
|
||||
def do(specs_dir: Path, output: Path):
|
||||
for item in specs_dir.iterdir():
|
||||
if not item.is_dir():
|
||||
continue
|
||||
|
||||
actor_name = item.name
|
||||
actor_output_path = output.joinpath(actor_name)
|
||||
actor_output_path.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
print(f"item: {item.as_posix()}")
|
||||
# if item.as_posix() == "generatorexamples/example1/specs/storymanager":
|
||||
# continue
|
||||
data = parser(path=item.as_posix())
|
||||
# print(f"data: {data}")
|
||||
spec_object = OpenRPCSpec.load(data)
|
||||
server_generator = RestServerGenerator(spec_object, actor_output_path)
|
||||
server_generator.generate()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
arg_parser = argparse.ArgumentParser(description="Hero server and client generator tool.")
|
||||
arg_parser.add_argument(
|
||||
"--specs",
|
||||
type=str,
|
||||
required=True,
|
||||
help="specs directory",
|
||||
)
|
||||
arg_parser.add_argument(
|
||||
"--output",
|
||||
type=str,
|
||||
required=True,
|
||||
help="output directory",
|
||||
)
|
||||
|
||||
args = arg_parser.parse_args()
|
||||
do(Path(args.specs), Path(args.output))
|
90
_archive/openrpc/generator/mdbook/generate_mdbook.py
Normal file
90
_archive/openrpc/generator/mdbook/generate_mdbook.py
Normal file
@ -0,0 +1,90 @@
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
|
||||
from jinja2 import Environment, FileSystemLoader
|
||||
|
||||
from ....openrpc.tools import get_pydantic_type, get_return_type, topological_sort
|
||||
|
||||
script_dir = os.path.dirname(os.path.abspath(__file__))
|
||||
|
||||
|
||||
def generate_models(openrpc_spec: dict) -> str:
|
||||
schema_dict = openrpc_spec["components"]["schemas"]
|
||||
sorted_classes = topological_sort(schema_dict)
|
||||
|
||||
env = Environment(loader=FileSystemLoader(script_dir), trim_blocks=True, lstrip_blocks=True)
|
||||
template = env.get_template("templates/mdbook/structs.jinja")
|
||||
model_code = template.render(
|
||||
sorted_classes=sorted_classes,
|
||||
schema_dict=schema_dict,
|
||||
get_pydantic_type=get_pydantic_type,
|
||||
)
|
||||
|
||||
return model_code
|
||||
|
||||
|
||||
def generate_model(model_name: str, schema: dict) -> str:
|
||||
env = Environment(loader=FileSystemLoader(script_dir))
|
||||
template = env.get_template("templates/vlang/struct.jinja")
|
||||
model_code = template.render(model_name=model_name, schema=schema, get_pydantic_type=get_pydantic_type)
|
||||
|
||||
return model_code
|
||||
|
||||
|
||||
def generate_api_methods(openrpc_spec: dict) -> str:
|
||||
env = Environment(loader=FileSystemLoader(script_dir), trim_blocks=True, lstrip_blocks=True)
|
||||
template = env.get_template("templates/mdbook/methods.jinja")
|
||||
|
||||
code = template.render(
|
||||
spec=openrpc_spec,
|
||||
methods=openrpc_spec.get("methods", []),
|
||||
get_return_type=get_return_type,
|
||||
get_pydantic_type=get_pydantic_type,
|
||||
)
|
||||
|
||||
return code
|
||||
|
||||
|
||||
def main() -> None:
|
||||
parser = argparse.ArgumentParser(description="Generate API code from OpenRPC specification")
|
||||
parser.add_argument(
|
||||
"-s",
|
||||
"--spec",
|
||||
help="Path to the specs (expressed in our own V format)",
|
||||
default="~/code/git.threefold.info/projectmycelium/hero_server/generatorexamples/example1/specs",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-o",
|
||||
"--output",
|
||||
default="/tmp/generator/mdbook",
|
||||
help="Output file path (default: /tmp/generator/mdbook)",
|
||||
)
|
||||
args = parser.parse_args()
|
||||
|
||||
spec_file = os.path.expanduser(args.spec)
|
||||
output_dir = os.path.expanduser(args.output)
|
||||
|
||||
if not os.path.isfile(spec_file):
|
||||
print(f"Error: OpenRPC specification file '{spec_file}' does not exist.")
|
||||
return
|
||||
|
||||
with open(spec_file) as file:
|
||||
openrpc_spec = json.load(file)
|
||||
|
||||
code_models = generate_models(openrpc_spec)
|
||||
code_methods = generate_api_methods(openrpc_spec)
|
||||
|
||||
os.makedirs(os.path.dirname(output_dir), exist_ok=True)
|
||||
|
||||
# Write the generated code to a file
|
||||
with open(f"{output_dir}/models.md", "w") as file:
|
||||
file.write(code_models)
|
||||
with open(f"{output_dir}/methods.md", "w") as file:
|
||||
file.write(code_methods)
|
||||
|
||||
print(f"Generated API code has been written to {output_dir}")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
16
_archive/openrpc/generator/mdbook/templates/methods.jinja
Normal file
16
_archive/openrpc/generator/mdbook/templates/methods.jinja
Normal file
@ -0,0 +1,16 @@
|
||||
## Methods
|
||||
|
||||
{% for method in methods %}
|
||||
- {{ method['name'] }}: {{ method.get('description', '') }}
|
||||
- Parameters:
|
||||
{% for param in method.get('params', []) %}
|
||||
{{ param['name'] }}: {{ get_pydantic_type(param['schema'])}}
|
||||
{% endfor %}
|
||||
|
||||
- Return Type:
|
||||
{{ get_return_type(method['result']) }}
|
||||
|
||||
- Example:
|
||||
{{ method.get('examples', [{}])[0] }}
|
||||
|
||||
{% endfor %}
|
@ -0,0 +1,9 @@
|
||||
# Classes
|
||||
|
||||
{% for class_name in sorted_classes %}
|
||||
- {{ schema_dict[class_name]['title'] }}
|
||||
{% for prop_name, prop in schema_dict[class_name]['properties'].items() %}
|
||||
- {{ prop_name }} ({{ get_pydantic_type(prop)}}): {{ prop['description'] }}
|
||||
{% endfor %}
|
||||
|
||||
{% endfor %}
|
170
_archive/openrpc/generator/model/model_generator.py
Normal file
170
_archive/openrpc/generator/model/model_generator.py
Normal file
@ -0,0 +1,170 @@
|
||||
from typing import Dict, List, Set
|
||||
|
||||
from heroserver.openrpc.generator.code.lang_code_generator import (
|
||||
LangCodeGenerator,
|
||||
PropertyInfo,
|
||||
)
|
||||
from heroserver.openrpc.model.common import (
|
||||
ContentDescriptorObject,
|
||||
ReferenceObject,
|
||||
SchemaObject,
|
||||
)
|
||||
from heroserver.openrpc.model.openrpc_spec import OpenRPCSpec
|
||||
|
||||
|
||||
class ModelGenerator:
|
||||
def __init__(self, spec: OpenRPCSpec, lang_code_generator: LangCodeGenerator) -> None:
|
||||
self.spec = spec
|
||||
self.lang_code_generator = lang_code_generator
|
||||
self.processed_objects: Dict[str, Dict[str, str]] = {}
|
||||
self.ordered_objects: List[str] = []
|
||||
self.used_names: Set[str] = set()
|
||||
|
||||
def generate_models(self):
|
||||
if not self.spec.components:
|
||||
return ""
|
||||
|
||||
schemas = self.spec.components.schemas
|
||||
schemas_path = ["components", "schemas"]
|
||||
for name, schema in schemas.items():
|
||||
self.jsonschema_to_type(
|
||||
path=schemas_path + [name],
|
||||
jsonschema=schema,
|
||||
)
|
||||
|
||||
objects_code = ""
|
||||
for val in self.ordered_objects:
|
||||
if val == "":
|
||||
continue
|
||||
objects_code = f"{objects_code}{val}\n\n"
|
||||
|
||||
print(f"debugzo4 {objects_code}")
|
||||
return objects_code
|
||||
|
||||
def jsonschema_to_type(self, path: List[str], jsonschema: SchemaObject | ReferenceObject) -> str:
|
||||
if isinstance(jsonschema, ReferenceObject):
|
||||
ref: str = jsonschema.ref
|
||||
|
||||
ref_schema = self.spec.ref_to_schema(ref)
|
||||
ref_path = ref.split("/")[1:]
|
||||
|
||||
if isinstance(ref_schema, ContentDescriptorObject):
|
||||
# TODO: implement
|
||||
raise Exception("unimplemented")
|
||||
# return self.content_descriptor_to_type(ref_path, ref_schema)
|
||||
|
||||
return self.jsonschema_to_type(ref_path, ref_schema)
|
||||
|
||||
path_str = "/".join([item.lower() for item in path])
|
||||
if path_str in self.processed_objects:
|
||||
return self.processed_objects[path_str]["name"]
|
||||
|
||||
type_name = self.type_name_from_path(path)
|
||||
|
||||
description = getattr(jsonschema, "description", None)
|
||||
if jsonschema.enum:
|
||||
enum = jsonschema.enum
|
||||
type_code = self.lang_code_generator.generate_enum(enum, type_name)
|
||||
if self.lang_code_generator.is_primitive(type_code):
|
||||
return type_code
|
||||
|
||||
self.add_object(path_str, type_code, type_name)
|
||||
return type_name
|
||||
|
||||
if jsonschema.type:
|
||||
match jsonschema.type:
|
||||
case "string":
|
||||
return self.lang_code_generator.string_primitive()
|
||||
|
||||
case "integer":
|
||||
return self.lang_code_generator.integer_primitive()
|
||||
|
||||
case "number":
|
||||
return self.lang_code_generator.number_primitive()
|
||||
|
||||
case "array":
|
||||
if isinstance(jsonschema.items, List):
|
||||
raise Exception("array of different item types is not supported")
|
||||
|
||||
item_type_name = self.jsonschema_to_type(path + ["item"], jsonschema.items)
|
||||
return self.lang_code_generator.array_of_type(item_type_name)
|
||||
|
||||
case "boolean":
|
||||
return self.lang_code_generator.bool_primitive()
|
||||
|
||||
case "object":
|
||||
# to prevent cyclic dependencies
|
||||
self.add_object(path_str, "", type_name)
|
||||
|
||||
properties: Dict[str, PropertyInfo] = {}
|
||||
for (
|
||||
property_name,
|
||||
property_schema,
|
||||
) in jsonschema.properties.items():
|
||||
schema = property_schema
|
||||
new_path = path + ["properties", property_name]
|
||||
if isinstance(property_schema, ReferenceObject):
|
||||
schema = self.spec.ref_to_schema(property_schema.ref)
|
||||
new_path = property_schema.ref.split("/")[1:]
|
||||
|
||||
property_info = PropertyInfo(
|
||||
name=property_name,
|
||||
type_name=self.jsonschema_to_type(new_path, schema),
|
||||
description=schema.description,
|
||||
example=schema.example,
|
||||
)
|
||||
|
||||
properties[property_name] = property_info
|
||||
|
||||
type_code = self.lang_code_generator.generate_object(type_name, properties)
|
||||
self.add_object(path_str, type_code, type_name)
|
||||
return type_name
|
||||
|
||||
case "null":
|
||||
return self.lang_code_generator.null_primitive()
|
||||
|
||||
case _:
|
||||
raise Exception(f"type {jsonschema.type} is not supported")
|
||||
|
||||
if jsonschema.anyOf:
|
||||
type_names = []
|
||||
for i, item in enumerate(jsonschema.anyOf):
|
||||
type_names.append(self.jsonschema_to_type(path + [f"anyOf{i}"], item))
|
||||
|
||||
return self.lang_code_generator.generate_multitype(type_names)
|
||||
# self.add_object(path_str, type_code, type_code)
|
||||
# return type_code
|
||||
|
||||
elif jsonschema.oneOf:
|
||||
type_names = []
|
||||
for i, item in enumerate(jsonschema.oneOf):
|
||||
type_names.append(self.jsonschema_to_type(path + [f"oneOf{i}"], item))
|
||||
|
||||
return self.lang_code_generator.generate_multitype(type_names)
|
||||
# self.add_object(path_str, type_code, type_code)
|
||||
# return type_code
|
||||
|
||||
elif jsonschema.allOf:
|
||||
return self.lang_code_generator.encapsulate_types(jsonschema.allOf)
|
||||
# self.add_object(path_str, type_code, type_code)
|
||||
# return type_name
|
||||
|
||||
raise Exception(f"type {jsonschema.type} is not supported")
|
||||
|
||||
def add_object(self, path_str: str, type_code: str, type_name: str):
|
||||
self.used_names.add(type_name)
|
||||
self.processed_objects[path_str] = {
|
||||
"code": type_code,
|
||||
"name": type_name,
|
||||
}
|
||||
print(f"debugzo21 {self.processed_objects[path_str]}")
|
||||
self.ordered_objects.append(type_code)
|
||||
|
||||
def type_name_from_path(self, path: List[str]) -> str:
|
||||
type_name = ""
|
||||
for item in reversed(path):
|
||||
type_name += item.title() if item.islower() else item
|
||||
if type_name not in self.used_names:
|
||||
return type_name
|
||||
|
||||
raise Exception(f"failed to generate unique name from path: {path}")
|
14
_archive/openrpc/generator/readme.md
Normal file
14
_archive/openrpc/generator/readme.md
Normal file
@ -0,0 +1,14 @@
|
||||
|
||||
## example how to use
|
||||
|
||||
```python
|
||||
|
||||
import heroserver.openrpc.generator
|
||||
|
||||
openrpc_spec = generator.openrpc_spec(
|
||||
path="~/code/git.threefold.info/projectmycelium/hero_server/generatorexamples/example1/specs"
|
||||
)
|
||||
|
||||
print(openrpc_spec)
|
||||
|
||||
```
|
@ -0,0 +1,28 @@
|
||||
from typing import Union
|
||||
|
||||
from fastapi import FastAPI
|
||||
from vm_manager__vm_start import vm_start
|
||||
|
||||
app = FastAPI()
|
||||
|
||||
#VM WOULD BE AN OBJECT of e.g. a virtual machine description
|
||||
|
||||
@app.get("/$circleguid/vm_manager/vm")
|
||||
def vm_get()-> VM:
|
||||
return {...}
|
||||
|
||||
|
||||
@app.post("/$circleguid/vm_manager/vm")
|
||||
def vm_set()-> bool:
|
||||
return True
|
||||
|
||||
@app.delete("/$circleguid/vm_manager/vm")
|
||||
def vm_delete()-> bool:
|
||||
##would use osis to delete this objecc
|
||||
return True
|
||||
|
||||
|
||||
@app.get("/$circleguid/vm_manager/vm_start/{vm_guid}")
|
||||
def vm_start(vm_guid: str) -> bool:
|
||||
vm_start(context=context,vm_guid=vm_guid)
|
||||
|
@ -0,0 +1,8 @@
|
||||
|
||||
def vm_start(context, vm_guid: str) -> bool:
|
||||
#context allows us to see who the user is and which groups the user is
|
||||
#context also gives a logging feature
|
||||
#context is linked to 1 circle
|
||||
#context is linked to a DB (OSIS)
|
||||
|
||||
#code to be implemented e.g. using DAGU to start a vm
|
@ -0,0 +1,256 @@
|
||||
import os
|
||||
from pathlib import Path
|
||||
from typing import Dict, List, Optional, Union
|
||||
|
||||
from jinja2 import Environment, FileSystemLoader
|
||||
|
||||
from heroserver.openrpc.generator.code.python.python_code_generator import PythonCodeGenerator
|
||||
from heroserver.openrpc.generator.model.model_generator import ModelGenerator
|
||||
|
||||
# Fix the issue by ensuring that the 'object' variable is properly defined and has the expected attributes.
|
||||
# The following code will ensure that 'object' is a valid SchemaObject before calling 'print_items'.
|
||||
from heroserver.openrpc.model.common import ContentDescriptorObject, ReferenceObject, SchemaObject
|
||||
from heroserver.openrpc.model.openrpc_spec import OpenRPCSpec
|
||||
from heroserver.openrpc.parser.parser import parser
|
||||
|
||||
script_dir = os.path.dirname(os.path.abspath(__file__))
|
||||
env = Environment(loader=FileSystemLoader(script_dir))
|
||||
|
||||
|
||||
class RestServerGenerator:
|
||||
def __init__(
|
||||
self,
|
||||
spec: OpenRPCSpec,
|
||||
dir: Path,
|
||||
) -> None:
|
||||
if not isinstance(spec, OpenRPCSpec):
|
||||
raise TypeError(f"Expected spec to be of type OpenRPCSpec, got {type(spec)}")
|
||||
if not isinstance(dir, Path):
|
||||
raise TypeError(f"Expected dir to be of type Path, got {type(dir)}")
|
||||
|
||||
self.model_generator = ModelGenerator(spec, PythonCodeGenerator())
|
||||
self.spec = spec
|
||||
self.dir = dir
|
||||
self.crud_methods_template = env.get_template("templates/crud_methods.jinja")
|
||||
self.internal_crud_methods_template = env.get_template("templates/internal_crud_methods.jinja")
|
||||
self.internal_crud_mock_methods_template = env.get_template("templates/internal_crud_mock_methods.jinja")
|
||||
self.imports_template = env.get_template("templates/imports.jinja")
|
||||
self.actor_method_template = env.get_template("templates/actor_method.jinja")
|
||||
self.internal_actor_method_template = env.get_template("templates/internal_actor_method.jinja")
|
||||
self.server_template = env.get_template("templates/server.jinja")
|
||||
|
||||
def generate(self):
|
||||
self.dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
self.generate_models()
|
||||
self.generate_crud()
|
||||
self.generate_mock_crud()
|
||||
self.generate_internal_actor_methods()
|
||||
self.generate_openapi()
|
||||
self.generate_openapi_mock()
|
||||
self.generate_server()
|
||||
|
||||
print(f"Generated API code has been written to {self.dir}")
|
||||
|
||||
def generate_server(self):
|
||||
code = self.server_template.render()
|
||||
|
||||
path = self.dir.joinpath("server.py")
|
||||
with open(path, "w") as file:
|
||||
file.write(code)
|
||||
|
||||
def generate_openapi(self):
|
||||
imports = self.imports_template.render(import_crud=True, import_models=True)
|
||||
app_init = "app = FastAPI()\n\n"
|
||||
methods = ""
|
||||
for path_str in self.model_generator.spec.get_root_objects().keys():
|
||||
object = self.model_generator.processed_objects[path_str]
|
||||
if object["code"] == "":
|
||||
continue
|
||||
|
||||
type_name = object["name"]
|
||||
variable_name = type_name.lower()
|
||||
methods += self.crud_methods_template.render(variable_name=variable_name, type_name=type_name) + "\n\n"
|
||||
|
||||
for method in self.spec.methods:
|
||||
if any(method.name.endswith(end) for end in ["get", "set", "delete"]):
|
||||
continue
|
||||
|
||||
params: Dict[str, str] = {}
|
||||
for param in method.params:
|
||||
params[param.name] = self.model_generator.jsonschema_to_type(["methods", method.name, "params", param.name], param.schema)
|
||||
|
||||
return_type = self.method_result_return_type(["methods", method.name, "result"], method.result)
|
||||
|
||||
function_name = method.name.lower().replace(".", "_")
|
||||
imports += f"from {function_name}_internal import {function_name}_internal\n"
|
||||
methods += (
|
||||
self.actor_method_template.render(
|
||||
rest_server_generator=self,
|
||||
function_name=function_name,
|
||||
method_params=params,
|
||||
method_result=return_type,
|
||||
)
|
||||
+ "\n\n"
|
||||
)
|
||||
|
||||
path = self.dir.joinpath("open_api.py")
|
||||
with open(path, "w") as file:
|
||||
file.write(f"{imports}\n\n{app_init}\n\n{methods}")
|
||||
|
||||
def generate_openapi_mock(self):
|
||||
imports = self.imports_template.render(mock=True, import_crud=True, import_models=True)
|
||||
app_init = "app = FastAPI()\n\n"
|
||||
methods = ""
|
||||
for path_str in self.model_generator.spec.get_root_objects().keys():
|
||||
object = self.model_generator.processed_objects[path_str]
|
||||
if object["code"] == "":
|
||||
continue
|
||||
|
||||
type_name = object["name"]
|
||||
variable_name = type_name.lower()
|
||||
methods += self.crud_methods_template.render(mock=True, variable_name=variable_name, type_name=type_name) + "\n\n"
|
||||
|
||||
for method in self.spec.methods:
|
||||
if any(method.name.endswith(end) for end in ["get", "set", "delete"]):
|
||||
continue
|
||||
|
||||
params: Dict[str, str] = {}
|
||||
for param in method.params:
|
||||
params[param.name] = self.model_generator.jsonschema_to_type(["methods", method.name, "params", param.name], param.schema)
|
||||
|
||||
return_type = self.method_result_return_type(["methods", method.name, "result"], method.result)
|
||||
|
||||
function_name = method.name.lower().replace(".", "_")
|
||||
imports += f"from {function_name}_internal import {function_name}_internal\n"
|
||||
methods += (
|
||||
self.actor_method_template.render(
|
||||
mock=True,
|
||||
rest_server_generator=self,
|
||||
function_name=function_name,
|
||||
method_params=params,
|
||||
method_result=return_type,
|
||||
)
|
||||
+ "\n\n"
|
||||
)
|
||||
|
||||
path = self.dir.joinpath("open_api_mock.py")
|
||||
with open(path, "w") as file:
|
||||
file.write(f"{imports}\n\n{app_init}\n\n{methods}")
|
||||
|
||||
def generate_models(self):
|
||||
imports = self.imports_template.render()
|
||||
code = self.model_generator.generate_models()
|
||||
path = self.dir.joinpath("models.py")
|
||||
|
||||
with open(path, "w") as file:
|
||||
file.write(f"{imports}\n\n{code}\n")
|
||||
|
||||
def generate_crud(self):
|
||||
imports = self.imports_template.render(import_models=True)
|
||||
methods = ""
|
||||
for path_str in self.model_generator.spec.get_root_objects().keys():
|
||||
object = self.model_generator.processed_objects[path_str]
|
||||
if object["code"] == "":
|
||||
continue
|
||||
|
||||
type_name = object["name"]
|
||||
variable_name = type_name.lower()
|
||||
methods += self.internal_crud_methods_template.render(variable_name=variable_name, type_name=type_name) + "\n\n"
|
||||
|
||||
path = self.dir.joinpath("crud.py")
|
||||
with open(path, "w") as file:
|
||||
file.write(f"{imports}\n\n{methods}")
|
||||
|
||||
def generate_mock_crud(self):
|
||||
imports = self.imports_template.render(import_models=True)
|
||||
imports += "from heroserver.openrpc.tools import create_example_object"
|
||||
methods = ""
|
||||
for path_str in self.model_generator.spec.get_root_objects().keys():
|
||||
object = self.model_generator.spec.get_root_objects()[path_str]
|
||||
|
||||
if isinstance(object, SchemaObject):
|
||||
print_items(object)
|
||||
|
||||
object = self.model_generator.processed_objects[path_str]
|
||||
if object["code"] == "":
|
||||
continue
|
||||
|
||||
type_name = object["name"]
|
||||
variable_name = type_name.lower()
|
||||
|
||||
methods += self.internal_crud_mock_methods_template.render(variable_name=variable_name, type_name=type_name) + "\n\n"
|
||||
|
||||
path = self.dir.joinpath("crud_mock.py")
|
||||
with open(path, "w") as file:
|
||||
file.write(f"{imports}\n\n{methods}")
|
||||
|
||||
def generate_internal_actor_methods(self):
|
||||
imports = self.imports_template.render(import_models=True)
|
||||
for method in self.spec.methods:
|
||||
function_name = method.name.lower().replace(".", "_") + "_internal"
|
||||
file_path = self.dir.joinpath(f"{function_name}.py")
|
||||
if file_path.exists():
|
||||
continue
|
||||
|
||||
if any(method.name.endswith(end) for end in ["get", "set", "delete"]):
|
||||
continue
|
||||
|
||||
params: Dict[str, str] = {}
|
||||
for param in method.params:
|
||||
params[param.name] = self.model_generator.jsonschema_to_type(["methods", method.name, "params", param.name], param.schema)
|
||||
|
||||
return_type = self.method_result_return_type(["methods", method.name, "result"], method.result)
|
||||
|
||||
code = self.internal_actor_method_template.render(
|
||||
rest_server_generator=self,
|
||||
function_name=function_name,
|
||||
method_params=params,
|
||||
method_result=return_type,
|
||||
)
|
||||
|
||||
with open(file_path, "w") as file:
|
||||
file.write(f"{imports}\n\n{code}")
|
||||
|
||||
def get_method_params(self, method_params: Dict[str, str]) -> str:
|
||||
return ", ".join([f"{param_name}: {param_type}" for param_name, param_type in method_params.items()])
|
||||
|
||||
def method_result_return_type(
|
||||
self,
|
||||
path: List[str],
|
||||
method_result: Optional[Union[ContentDescriptorObject, ReferenceObject]],
|
||||
) -> str:
|
||||
if not method_result:
|
||||
type_name = ""
|
||||
|
||||
if isinstance(method_result, ContentDescriptorObject):
|
||||
schema = method_result.schema
|
||||
type_name = self.model_generator.jsonschema_to_type(path, schema)
|
||||
|
||||
elif isinstance(method_result, ReferenceObject):
|
||||
type_name = self.model_generator.jsonschema_to_type(path, method_result)
|
||||
|
||||
return type_name
|
||||
|
||||
|
||||
def print_items(schema_object, depth=0):
|
||||
print(f"prito {schema_object.items}")
|
||||
indent = " " * depth
|
||||
if isinstance(schema_object.items, list):
|
||||
for item in schema_object.items:
|
||||
print(f"{indent}Item: {item}")
|
||||
if isinstance(item, SchemaObject):
|
||||
print_items(item, depth + 1)
|
||||
print(f"{indent}Example: {item.example}")
|
||||
elif isinstance(schema_object.items, SchemaObject):
|
||||
print(f"{indent}Item: {schema_object.items}")
|
||||
print_items(schema_object.items, depth + 1)
|
||||
print(f"{indent}Example: {schema_object.items.example}")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
data = parser(path="~/code/git.threefold.info/hero/hero_server_python/baobabspecs")
|
||||
|
||||
spec_object = OpenRPCSpec.load(data)
|
||||
server_generator = RestServerGenerator(spec_object, Path("/tmp/rest2"))
|
||||
server_generator.generate()
|
@ -0,0 +1,7 @@
|
||||
@app.post("/$circleguid/{{function_name}}")
|
||||
def {{ function_name }}(circleguid: int, {{ rest_server_generator.get_method_params(method_params) }}){% if method_result %} -> {{ method_result }}{% endif %}:
|
||||
{% if mock %}
|
||||
return {{function_name}}_internal_mock(context, circleguid, {{', '.join(method_params.keys())}})
|
||||
{% else %}
|
||||
return {{function_name}}_internal(context, circleguid, {{', '.join(method_params.keys())}})
|
||||
{% endif %}
|
@ -0,0 +1,16 @@
|
||||
{% if mock %}
|
||||
{% set suffix = '_mock' %}
|
||||
{% else %}
|
||||
{% set suffix = '' %}
|
||||
{% endif %}
|
||||
@app.get("/{circleguid}/{{variable_name}}_manager{{suffix}}/{{variable_name}}/{id}")
|
||||
def {{variable_name}}_get(circleguid: int, id: str)-> {{type_name}}:
|
||||
return {{variable_name}}_get_internal{{suffix}}(circleguid, id)
|
||||
|
||||
@app.post("/{circleguid}/{{variable_name}}_manager{{suffix}}/{{variable_name}}")
|
||||
def {{variable_name}}_set(circleguid: int, {{variable_name}}: {{type_name}})-> bool:
|
||||
return {{variable_name}}_set_internal{{suffix}}(circleguid, {{variable_name}})
|
||||
|
||||
@app.delete("/{circleguid}/{{variable_name}}_manager{{suffix}}/{{variable_name}}/{id}")
|
||||
def {{variable_name}}_delete(circleguid: int, id: str)-> bool:
|
||||
return {{variable_name}}_delete_internal{{suffix}}(circleguid, id)
|
@ -0,0 +1,12 @@
|
||||
{% if mock %}
|
||||
{% set suffix = '_mock' %}
|
||||
{% else %}
|
||||
{% set suffix = '' %}
|
||||
{% endif %}
|
||||
from fastapi import FastAPI
|
||||
from pydantic import BaseModel, Field
|
||||
from typing import List
|
||||
from enum import Enum
|
||||
{% if import_models %}from models import *{% endif %}
|
||||
{% if import_crud %}from crud{{suffix}} import *{% endif %}
|
||||
{% if import_openapi %}from open_api import *{% endif %}
|
@ -0,0 +1,9 @@
|
||||
from typing import List, Optional, Dict, Union
|
||||
from enum import Enum
|
||||
|
||||
def {{function_name}}(context, circleguid: int, {{rest_server_generator.get_method_params(method_params)}}) -> {{method_result}}:
|
||||
#context allows us to see who the user is and which groups the user is
|
||||
#context also gives a logging feature
|
||||
#context is linked to 1 circle
|
||||
#context is linked to a DB (OSIS)
|
||||
pass
|
@ -0,0 +1,11 @@
|
||||
def {{variable_name}}_get_internal(circleguid: int, id: str) -> {{type_name}}:
|
||||
return {{type_name}}()
|
||||
|
||||
def {{variable_name}}_set_internal(circleguid: int, {{variable_name}}: {{type_name}})-> bool:
|
||||
return True
|
||||
|
||||
def {{variable_name}}_delete_internal(circleguid: int, id: str)-> bool:
|
||||
##would use osis to delete this objecc
|
||||
return True
|
||||
|
||||
|
@ -0,0 +1,8 @@
|
||||
def {{variable_name}}_get_internal_mock(circleguid: int, id: str) -> {{type_name}}:
|
||||
return create_example_object({{type_name}})
|
||||
|
||||
def {{variable_name}}_set_internal_mock(circleguid: int, {{variable_name}}: {{type_name}})-> bool:
|
||||
return True
|
||||
|
||||
def {{variable_name}}_delete_internal_mock(circleguid: int, id: str)-> bool:
|
||||
return True
|
@ -0,0 +1,5 @@
|
||||
import uvicorn
|
||||
from open_api import app
|
||||
|
||||
if __name__ == "__main__":
|
||||
uvicorn.run(app, host="0.0.0.0", port=8000)
|
@ -0,0 +1,169 @@
|
||||
import os
|
||||
from pathlib import Path
|
||||
from typing import Dict, List, Optional, Union
|
||||
|
||||
from jinja2 import Environment, FileSystemLoader
|
||||
|
||||
from heroserver.openrpc.generator.actor.vlang.vlang_code_generator import VlangGenerator
|
||||
from heroserver.openrpc.generator.model_generator import ModelGenerator
|
||||
from heroserver.openrpc.model.common import ContentDescriptorObject, ReferenceObject
|
||||
from heroserver.openrpc.model.openrpc_spec import OpenRPCSpec
|
||||
|
||||
script_dir = os.path.dirname(os.path.abspath(__file__))
|
||||
env = Environment(loader=FileSystemLoader(script_dir))
|
||||
|
||||
|
||||
class RestServerGenerator:
|
||||
def __init__(
|
||||
self,
|
||||
spec: OpenRPCSpec,
|
||||
dir: Path,
|
||||
) -> None:
|
||||
self.lang_code_generator = VlangGenerator()
|
||||
self.model_generator = ModelGenerator(spec, VlangGenerator())
|
||||
self.spec = spec
|
||||
self.dir = dir
|
||||
self.crud_methods_template = env.get_template("templates/crud_methods.jinja")
|
||||
self.internal_crud_methods_template = env.get_template("templates/internal_crud_methods.jinja")
|
||||
self.imports_template = env.get_template("templates/imports.jinja")
|
||||
self.actor_method_template = env.get_template("templates/actor_method.jinja")
|
||||
self.internal_actor_method_template = env.get_template("templates/internal_actor_method.jinja")
|
||||
self.server_template = env.get_template("templates/server.jinja")
|
||||
|
||||
def generate(self):
|
||||
self.dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
self.generate_models()
|
||||
self.generate_crud()
|
||||
self.generate_internal_actor_methods()
|
||||
self.generate_openapi()
|
||||
self.generate_server()
|
||||
|
||||
print(f"Generated API code has been written to {self.dir}")
|
||||
|
||||
def generate_server(self):
|
||||
imports = self.imports_template.render(import_vweb=True)
|
||||
code = self.server_template.render()
|
||||
|
||||
path = self.dir.joinpath("server.v")
|
||||
with open(path, "w") as file:
|
||||
file.write(f"{imports}\n\n{code}")
|
||||
|
||||
def generate_openapi(self):
|
||||
imports = self.imports_template.render(import_vweb=True)
|
||||
methods = ""
|
||||
for path_str in self.model_generator.spec.get_root_objects().keys():
|
||||
object = self.model_generator.processed_objects[path_str]
|
||||
if object["code"] == "":
|
||||
continue
|
||||
|
||||
type_name = object["name"]
|
||||
variable_name = type_name.lower()
|
||||
methods += self.crud_methods_template.render(variable_name=variable_name, type_name=type_name) + "\n\n"
|
||||
|
||||
for method in self.spec.methods:
|
||||
if any(method.name.endswith(end) for end in ["get", "set", "delete"]):
|
||||
continue
|
||||
|
||||
params: Dict[str, str] = {}
|
||||
for param in method.params:
|
||||
params[param.name] = self.model_generator.jsonschema_to_type(["methods", method.name, "params", param.name], param.schema)
|
||||
|
||||
return_type = self.method_result_return_type(["methods", method.name, "result"], method.result)
|
||||
|
||||
function_name = method.name.lower().replace(".", "_")
|
||||
methods += (
|
||||
self.actor_method_template.render(
|
||||
rest_server_generator=self,
|
||||
function_name=function_name,
|
||||
method_params=params,
|
||||
method_result=return_type,
|
||||
)
|
||||
+ "\n\n"
|
||||
)
|
||||
|
||||
path = self.dir.joinpath("open_api.v")
|
||||
with open(path, "w") as file:
|
||||
file.write(f"{imports}\n\n{methods}")
|
||||
|
||||
def generate_models(self):
|
||||
imports = self.imports_template.render()
|
||||
code = self.model_generator.generate_models()
|
||||
path = self.dir.joinpath("models.v")
|
||||
|
||||
with open(path, "w") as file:
|
||||
file.write(f"{imports}\n\n{code}\n")
|
||||
|
||||
def generate_crud(self):
|
||||
imports = self.imports_template.render(import_models=True)
|
||||
methods = ""
|
||||
for path_str in self.model_generator.spec.get_root_objects().keys():
|
||||
object = self.model_generator.processed_objects[path_str]
|
||||
if object["code"] == "":
|
||||
continue
|
||||
|
||||
type_name = object["name"]
|
||||
variable_name = type_name.lower()
|
||||
methods += self.internal_crud_methods_template.render(variable_name=variable_name, type_name=type_name) + "\n\n"
|
||||
|
||||
path = self.dir.joinpath("crud.v")
|
||||
with open(path, "w") as file:
|
||||
file.write(f"{imports}\n\n{methods}")
|
||||
|
||||
def generate_internal_actor_methods(self):
|
||||
imports = self.imports_template.render(import_models=True)
|
||||
for method in self.spec.methods:
|
||||
function_name = method.name.lower().replace(".", "_") + "_internal"
|
||||
file_path = self.dir.joinpath(f"{function_name}.v")
|
||||
if file_path.exists():
|
||||
continue
|
||||
|
||||
if any(method.name.endswith(end) for end in ["get", "set", "delete"]):
|
||||
continue
|
||||
|
||||
params: Dict[str, str] = {}
|
||||
for param in method.params:
|
||||
params[param.name] = self.model_generator.jsonschema_to_type(["methods", method.name, "params", param.name], param.schema)
|
||||
|
||||
return_type = self.method_result_return_type(["methods", method.name, "result"], method.result)
|
||||
|
||||
code = self.internal_actor_method_template.render(
|
||||
rest_server_generator=self,
|
||||
function_name=function_name,
|
||||
method_params=params,
|
||||
method_result=return_type,
|
||||
)
|
||||
|
||||
with open(file_path, "w") as file:
|
||||
file.write(f"{imports}\n\n{code}")
|
||||
|
||||
def get_method_params(self, method_params: Dict[str, str]) -> str:
|
||||
return ", ".join([f"{param_name} {param_type}" for param_name, param_type in method_params.items()])
|
||||
|
||||
def method_result_return_type(
|
||||
self,
|
||||
path: List[str],
|
||||
method_result: Optional[Union[ContentDescriptorObject, ReferenceObject]],
|
||||
) -> str:
|
||||
if not method_result:
|
||||
type_name = ""
|
||||
|
||||
if isinstance(method_result, ContentDescriptorObject):
|
||||
schema = method_result.schema
|
||||
type_name = self.model_generator.jsonschema_to_type(path, schema)
|
||||
|
||||
elif isinstance(method_result, ReferenceObject):
|
||||
type_name = self.model_generator.jsonschema_to_type(path, method_result)
|
||||
|
||||
return type_name
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
from heroserver.openrpc.generator.model_generator import ModelGenerator
|
||||
from heroserver.openrpc.parser.parser import parser
|
||||
|
||||
data = parser(path="/root/code/git.threefold.info/projectmycelium/hero_server/generatorexamples/example1/specs/storymanager")
|
||||
|
||||
spec_object = OpenRPCSpec.load(data)
|
||||
server_generator = RestServerGenerator(spec_object, Path("/tmp/rest3"))
|
||||
server_generator.generate()
|
@ -0,0 +1,20 @@
|
||||
@['/:circleguid/{{function_name}}'; post]
|
||||
pub fn (mut v_server_app VServerApp) {{ function_name }}(circleguid int) vweb.Result{
|
||||
body := json2.raw_decode(v_server_app.req.data)!.as_map()
|
||||
{% for param_name, param_tpe in method_params.items() %}
|
||||
{% if rest_server_generator.lang_code_generator.is_primitive(param_type) %}
|
||||
{{param_name}} := body['{{param_name}}'].{{param_type}}()
|
||||
{% else %}
|
||||
{{param_name}} := json2.decode[{{param_type}}](body['{{param_name}}'].json_str()) or {
|
||||
v_server_app.set_status(400, '')
|
||||
return v_server_app.text('HTTP 400: Bad Request')
|
||||
}
|
||||
{% endif %}
|
||||
{% endfor %}
|
||||
res := {{function_name}}_internal(context, circleguid, {{', '.join(method_params.keys())}}) or {
|
||||
v_server_app.set_status(500, '')
|
||||
return v_server_app.text('HTTP 500: Internal Server Error')
|
||||
}
|
||||
|
||||
return v_server_app.json(res)
|
||||
}
|
@ -0,0 +1,32 @@
|
||||
@['/:circleguid/{{variable_name}}_manager/{{variable_name}}/:id'; get]
|
||||
pub fn (mut v_server_app VServerApp) {{variable_name}}_get(circleguid int, id str) vweb.Result{
|
||||
res := {{variable_name}}_get_internal(circleguid, id) or {
|
||||
v_server_app.set_status(500, '')
|
||||
return v_server_app.text('HTTP 500: Internal Server Error')
|
||||
}
|
||||
return v_server_app.json(res)
|
||||
}
|
||||
|
||||
@['/:circleguid/{{variable_name}}_manager/{{variable_name}}'; post]
|
||||
pub fn (mut v_server_app VServerApp) {{variable_name}}_set(circleguid int) vweb.Result{
|
||||
{{variable_name}} := json2.decode[{{type_name}}](v_server_app.req.data) or {
|
||||
v_server_app.set_status(400, '')
|
||||
return v_server_app.text('HTTP 400: Bad Request')
|
||||
}
|
||||
res := {{variable_name}}_set_internal(circleguid, {{variable_name}})or {
|
||||
v_server_app.set_status(500, '')
|
||||
return v_server_app.text('HTTP 500: Internal Server Error')
|
||||
}
|
||||
|
||||
return v_server_app.json(res)
|
||||
}
|
||||
|
||||
@['/:circleguid/{{variable_name}}_manager/{{variable_name}}/:id'; delete]
|
||||
pub fn (mut v_server_app VServerApp) {{variable_name}}_delete(circleguid int, id str) vweb.Result{
|
||||
##would use osis to delete this objecc
|
||||
res := {{variable_name}}_delete_internal(circleguid, id) or {
|
||||
v_server_app.set_status(500, '')
|
||||
return v_server_app.text('HTTP 500: Internal Server Error')
|
||||
}
|
||||
return v_server_app.json(res)
|
||||
}
|
@ -0,0 +1,4 @@
|
||||
module main
|
||||
|
||||
import x.json2
|
||||
{% if import_vweb %}import vweb{% endif %}
|
@ -0,0 +1,11 @@
|
||||
module main
|
||||
|
||||
import freeflowuniverse.crystallib.context
|
||||
|
||||
pub fn {{function_name}}(ctx context.Context, circleguid int, {{rest_server_generator.get_method_params(method_params)}}) !{{method_result}}{
|
||||
// context allows us to see who the user is and which groups the user is
|
||||
// context also gives a logging feature
|
||||
// context is linked to 1 circle
|
||||
// context is linked to a DB (OSIS)
|
||||
return 0
|
||||
}
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user