Compare commits

...

18 Commits

Author          SHA1        Message                               Date
aicoder         78ad8346a3  File json2pdf.py modified by AICoder  2023-07-02 16:16:22 +00:00
Carlos Polop    a0f612b582  Update aicoder.yml                    2023-07-02 18:09:12 +02:00
Carlos Polop    aa59afe289  Update aicoder.yml                    2023-07-02 17:53:59 +02:00
Carlos Polop    08144aaac3  Update aicoder.yml                    2023-07-02 17:52:25 +02:00
Carlos Polop    8f533247be  Update aicoder.yml                    2023-07-02 17:51:12 +02:00
Carlos Polop    660dc3dc60  Update aicoder.yml                    2023-07-02 17:45:31 +02:00
Carlos Polop    7b8b6670b8  Update aicoder.yml                    2023-07-02 17:37:26 +02:00
Carlos Polop    6f48de1573  Update aicoder.yml                    2023-07-02 17:14:13 +02:00
Carlos Polop    3cceae682d  Update aicoder.yml                    2023-07-02 17:08:25 +02:00
Carlos Polop    4a29293199  Update CI-master_tests.yml            2023-07-02 17:07:55 +02:00
Carlos Polop    6d2e33cd61  Update aicoder.yml                    2023-07-02 17:05:53 +02:00
Carlos Polop    8dd0350b5c  Update aicoder.yml                    2023-07-02 17:02:19 +02:00
carlospolop     b4801ccc4d  testing actions                       2023-07-02 16:19:35 +02:00
Carlos Polop    083ed6ae7d  Update aicoder.yml                    2023-07-02 16:18:05 +02:00
Carlos Polop    ad2150ded5  Update aicoder.yml                    2023-07-02 16:04:36 +02:00
Carlos Polop    74377ec9e8  Update aicoder.yml                    2023-07-02 16:03:06 +02:00
Carlos Polop    917a3a0101  Update aicoder.yml                    2023-07-02 15:56:17 +02:00
carlospolop     099755dbcb  actions                               2023-07-02 15:45:35 +02:00
5 changed files with 245 additions and 167 deletions

View File

@@ -4,6 +4,8 @@ on:
  push:
    branches:
      - master
    paths-ignore:
      - '.github/**'
  schedule:
    - cron: "5 4 * * SUN"

.github/workflows/aicoder.yml (vendored, new file, 23 lines)
View File

@@ -0,0 +1,23 @@
name: aicoder

on:
  workflow_dispatch:

jobs:
  Build_and_test_winpeas_master:
    runs-on: ubuntu-latest
    steps:
      # checkout
      - name: AICoder GH Action
        uses: AICoderHub/GH_Action@v0.11
        with:
          INPUT_MODE: 'file-optimizer'
          INPUT_PROMPT: ''
          INPUT_OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
          INPUT_MODEL: 'gpt-4'
          TEMPLATE_FILES: ''
          ORIGIN_BRANCH: 'aicoder'
          TO_BRANCH: 'master'
          CHECK_PATH: './parsers/json2pdf.py'
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
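
The workflow above hands everything to the third-party AICoder action; the script it wraps (AICoder.py, shown next) reads its inputs from environment variables. As a hedged sketch only, an equivalent local run could look like the following. The INPUT_PROMPT, INPUT_API_KEY and INPUT_FILE_PATH names come from the script's own os.environ lookups; how the action maps INPUT_OPENAI_API_KEY and CHECK_PATH onto them is an assumption, and the prompt value is a placeholder.

# Hypothetical local equivalent of the workflow inputs above (editor's sketch).
import os
import subprocess

os.environ["INPUT_PROMPT"] = "Optimize this file"            # placeholder prompt
os.environ["INPUT_API_KEY"] = os.environ["OPENAI_API_KEY"]   # assumes the key is already exported
os.environ["INPUT_FILE_PATH"] = "./parsers/json2pdf.py"      # mirrors CHECK_PATH above
os.environ["INPUT_MODEL"] = "gpt-4"
# GITHUB_TOKEN would also be needed for the pull-request step.

subprocess.run(["python3", "AICoder.py"], check=True)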

AICoder.py (new file, 208 lines)
View File

@@ -0,0 +1,208 @@
import argparse
import os
import sys
import string
import random
from typing import List
import openai
import json
import subprocess
import tiktoken
import requests
from github import Github
#########################
#### OPENAI FUNCTIONS ###
#########################
def reportTokens(prompt, model="gpt-4"):
    encoding = tiktoken.encoding_for_model(model)
    print("\033[37m" + str(len(encoding.encode(prompt))) + " tokens\033[0m" + " in prompt: " + "\033[92m" + prompt[:50] + "\033[0m" + ("..." if len(prompt) > 50 else ""))

def write_file(file_path: str, content: str):
    """Write content to a file creating the needed directories first"""
    os.makedirs(os.path.dirname(file_path), exist_ok=True)
    with open(file_path, "w") as file:
        file.write(content)

def delete_file(file_path: str):
    """Delete a file if it exists"""
    if os.path.isfile(file_path):
        os.remove(file_path)

openai_available_functions = {
    "write_file": write_file, "delete_file": delete_file
}

openai_functions = [
    {
        "name": "write_file",
        "description": "Write a file giving the path and the content",
        "parameters": {
            "type": "object",
            "properties": {
                "file_path": {
                    "type": "string",
                    "description": "Path to the file to write",
                },
                "content": {
                    "type": "string",
                    "description": "Content to write in the file",
                },
            },
            "required": ["file_path", "content"],
        },
    },
    {
        "name": "delete_file",
        "description": "Delete a file",
        "parameters": {
            "type": "object",
            "properties": {
                "file_path": {
                    "type": "string",
                    "description": "Path to the file to write",
                }
            },
            "required": ["file_path"],
        },
    }
]
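# ---- Editor's sketch (not part of the committed AICoder.py) -----------------
# The schemas above follow the legacy OpenAI function-calling format. For the model
# to return a "function_call", they would normally be passed to the chat request,
# roughly as below; process_file() further down does not pass them, so this wiring
# is an assumption about the intended flow, not what the committed code does.
def _sketch_chat_with_functions(api_key: str):
    openai.api_key = api_key
    return openai.ChatCompletion.create(
        model="gpt-4",
        messages=[{"role": "user", "content": "Improve parsers/json2pdf.py"}],  # placeholder prompt
        functions=openai_functions,
        function_call="auto",
        temperature=0,
    )
# ------------------------------------------------------------------------------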
#########################
#### GIT FUNCTIONS ######
#########################
def create_pull_request(branch_name, commit_message, github_token):
    github = Github(github_token)
    repo = github.get_repo(os.environ["GITHUB_REPOSITORY"])

    # Create a new branch
    base_branch = repo.get_branch(repo.default_branch)
    repo.create_git_ref(ref=f"refs/heads/{branch_name}", sha=base_branch.commit.sha)

    # Commit changes to the new branch
    subprocess.run(["git", "checkout", branch_name])
    subprocess.run(["git", "add", "."])
    subprocess.run(["git", "commit", "-m", commit_message])
    subprocess.run(["git", "push", "origin", branch_name])

    # Create a pull request
    pr = repo.create_pull(
        title=commit_message,
        body="Generated by OpenAI Github Action",
        head=branch_name,
        base=repo.default_branch
    )
    return pr.html_url
#########################
#### FILE PROCESSING ####
#########################
def process_file(prompt: str, api_key: str, file_path: str, model: str="gpt-4") -> str:
    with open(file_path, "r") as file:
        file_content = file.read()

    messages = [
        {"role": "system", "content": f"You are a developer and your goal is to generate code. The user will ask you to improve and modify some code. Your response must be a valid JSON with the path of each file to write as keys and the content of the files as values. Several files can be written at the same time."},
        {"role": "user", "content": prompt},
        {"role": "user", "content": f"This is the code from the file '{file_path}':\n\n{file_content}"}
    ]

    openai.api_key = api_key
    reportTokens(f"This is the code from the file '{file_path}':\n\n{file_content}")

    response = openai.ChatCompletion.create(
        model=model,
        messages=messages,
        temperature=0
    )
    response_message = response["choices"][0]["message"]

    # Step 2: check if GPT wanted to call a function
    if response_message.get("function_call"):
        function_name = response_message["function_call"]["name"]
        fuction_to_call = openai_available_functions[function_name]
        function_args = json.loads(response_message["function_call"]["arguments"])
        fuction_to_call(**function_args)
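# ---- Editor's sketch (not part of the committed AICoder.py) -----------------
# Shape of the message that process_file() inspects when the model requests a tool
# call, and the same dispatch it performs through openai_available_functions.
def _sketch_dispatch():
    example_message = {
        "function_call": {
            "name": "write_file",
            "arguments": json.dumps({
                "file_path": "parsers/json2pdf.py",       # placeholder path
                "content": "# rewritten file content\n",  # placeholder content
            }),
        }
    }
    if example_message.get("function_call"):
        fn = openai_available_functions[example_message["function_call"]["name"]]
        args = json.loads(example_message["function_call"]["arguments"])
        fn(**args)  # equivalent to write_file(file_path=..., content=...)
# ------------------------------------------------------------------------------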
def process_folder(prompt: str, api_key: str, folder_path: str, model: str="gpt-4") -> List[str]:
    responses = []
    for root, _, files in os.walk(folder_path):
        for file in files:
            file_path = os.path.join(root, file)
            response = process_file(prompt, api_key, file_path, model)
            responses.append(response)
#########################
#### MAIN FUNCTION ######
#########################
def get_random_string(length):
    # With combination of lower and upper case
    letters = string.ascii_letters
    result_str = ''.join(random.choice(letters) for i in range(length))
    return result_str
def main(prompt: str, api_key: str, file_path: str, github_token: str, model: str="gpt-4"):
    if os.path.isfile(file_path):
        process_file(prompt, api_key, file_path, model)
    elif os.path.isdir(file_path):
        process_folder(prompt, api_key, file_path, model)
    else:
        print("Error: Invalid file path.")
        sys.exit(1)

    try:
        create_pull_request(get_random_string(5), f"Modified {file_path}", github_token)
    except Exception as e:
        print(f"Error: Failed to create pull request. {e}")
        sys.exit(1)
if __name__ == "__main__":
    # Setup the argument parser
    parser = argparse.ArgumentParser()

    # Add arguments for prompt, api_key, file_path and github_token
    parser.add_argument('--prompt', default=None, type=str, help='Input prompt')
    parser.add_argument('--api-key', default=None, type=str, help='Input API key')
    parser.add_argument('--path', default=None, type=str, help='Input file/folder path')
    parser.add_argument('--github-token', default=None, type=str, help='Github token')
    parser.add_argument('--model', default="gpt-4", type=str, help='Model to use')

    # Parse the arguments
    args = parser.parse_args()

    prompt = os.environ.get("INPUT_PROMPT", args.prompt)
    api_key = os.environ.get("INPUT_API_KEY", args.api_key)
    file_path = os.environ.get("INPUT_FILE_PATH", args.path)
    github_token = os.environ.get("GITHUB_TOKEN", args.github_token)
    model = os.environ.get("INPUT_MODEL", args.model)

    if not prompt or not api_key or not file_path:
        print("Error: Missing required inputs.")
        sys.exit(1)

    #if not github_token:
    #    print("Error: Missing github token.")
    #    sys.exit(1)

    if os.path.exists(prompt):
        with open(prompt, "r") as file:
            prompt = file.read()

    if prompt.startswith("http"):
        prompt = requests.get(prompt).text

    main(prompt, api_key, file_path, github_token, model)
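
Since every input is read with os.environ.get(ENV_NAME, args.X), environment variables take precedence over the CLI flags, and the prompt may be a literal string, a local file path, or an http(s) URL. A hedged usage sketch, with all values as placeholders:

# Hypothetical invocation from another script; the flag names come from the argparse setup above.
import subprocess

subprocess.run([
    "python3", "AICoder.py",
    "--prompt", "Improve error handling",     # could also be a file path or an http(s) URL
    "--api-key", "sk-placeholder",
    "--path", "./parsers/json2pdf.py",
    "--github-token", "ghp-placeholder",      # its presence check is commented out above
], check=True)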

View File

@@ -99,145 +99,3 @@ if [ "$(command -v smbutil)" ] || [ "$DEBUG" ]; then
  warn_exec smbutil statshares -a
  echo ""
fi
#-- SY) Environment vars
print_2title "Environment"
print_info "Any private information inside environment variables?"
(env || printenv || set) 2>/dev/null | grep -v "RELEVANT*|FIND*|^VERSION=|dbuslistG|mygroups|ldsoconfdG|pwd_inside_history|kernelDCW_Ubuntu_Precise|kernelDCW_Ubuntu_Trusty|kernelDCW_Ubuntu_Xenial|kernelDCW_Rhel|^sudovB=|^rootcommon=|^mounted=|^mountG=|^notmounted=|^mountpermsB=|^mountpermsG=|^kernelB=|^C=|^RED=|^GREEN=|^Y=|^B=|^NC=|TIMEOUT=|groupsB=|groupsVB=|knw_grps=|sidG|sidB=|sidVB=|sidVB2=|sudoB=|sudoG=|sudoVB=|timersG=|capsB=|notExtensions=|Wfolders=|writeB=|writeVB=|_usrs=|compiler=|PWD=|LS_COLORS=|pathshG=|notBackup=|processesDump|processesB|commonrootdirs|USEFUL_SOFTWARE|PSTORAGE_KUBERNETES" | sed -${E} "s,[pP][wW][dD]|[pP][aA][sS][sS][wW]|[aA][pP][iI][kK][eE][yY]|[aA][pP][iI][_][kK][eE][yY]|KRB5CCNAME,${SED_RED},g" || echo_not_found "env || set"
echo ""
#-- SY) Dmesg
if [ "$(command -v dmesg 2>/dev/null)" ] || [ "$DEBUG" ]; then
print_2title "Searching Signature verification failed in dmesg"
print_info "https://book.hacktricks.xyz/linux-hardening/privilege-escalation#dmesg-signature-verification-failed"
(dmesg 2>/dev/null | grep "signature") || echo_not_found "dmesg"
echo ""
fi
#-- SY) Kernel extensions
if [ "$MACPEAS" ]; then
print_2title "Kernel Extensions not belonging to apple"
kextstat 2>/dev/null | grep -Ev " com.apple."
print_2title "Unsigned Kernel Extensions"
macosNotSigned /Library/Extensions
macosNotSigned /System/Library/Extensions
fi
if [ "$(command -v bash 2>/dev/null)" ]; then
print_2title "Executing Linux Exploit Suggester"
print_info "https://github.com/mzet-/linux-exploit-suggester"
les_b64="peass{LES}"
echo $les_b64 | base64 -d | bash | sed "s,$(printf '\033')\\[[0-9;]*[a-zA-Z],,g" | grep -i "\[CVE" -A 10 | grep -Ev "^\-\-$" | sed -${E} "s/\[(CVE-[0-9]+-[0-9]+,?)+\].*/${SED_RED}/g"
echo ""
fi
if [ "$(command -v perl 2>/dev/null)" ]; then
print_2title "Executing Linux Exploit Suggester 2"
print_info "https://github.com/jondonas/linux-exploit-suggester-2"
les2_b64="peass{LES2}"
echo $les2_b64 | base64 -d | perl 2>/dev/null | sed "s,$(printf '\033')\\[[0-9;]*[a-zA-Z],,g" | grep -i "CVE" -B 1 -A 10 | grep -Ev "^\-\-$" | sed -${E} "s,CVE-[0-9]+-[0-9]+,${SED_RED},g"
echo ""
fi
if [ "$MACPEAS" ] && [ "$(command -v brew 2>/dev/null)" ]; then
print_2title "Brew Doctor Suggestions"
brew doctor
echo ""
fi
#-- SY) AppArmor
print_2title "Protections"
print_list "AppArmor enabled? .............. "$NC
if [ "$(command -v aa-status 2>/dev/null)" ]; then
aa-status 2>&1 | sed "s,disabled,${SED_RED},"
elif [ "$(command -v apparmor_status 2>/dev/null)" ]; then
apparmor_status 2>&1 | sed "s,disabled,${SED_RED},"
elif [ "$(ls -d /etc/apparmor* 2>/dev/null)" ]; then
ls -d /etc/apparmor*
else
echo_not_found "AppArmor"
fi
#-- SY) AppArmor2
print_list "AppArmor profile? .............. "$NC
(cat /proc/self/attr/current 2>/dev/null || echo "unconfined") | sed "s,unconfined,${SED_RED}," | sed "s,kernel,${SED_GREEN},"
#-- SY) LinuxONE
print_list "is linuxONE? ................... "$NC
( (uname -a | grep "s390x" >/dev/null 2>&1) && echo "Yes" || echo_not_found "s390x")
#-- SY) grsecurity
print_list "grsecurity present? ............ "$NC
( (uname -r | grep "\-grsec" >/dev/null 2>&1 || grep "grsecurity" /etc/sysctl.conf >/dev/null 2>&1) && echo "Yes" || echo_not_found "grsecurity")
#-- SY) PaX
print_list "PaX bins present? .............. "$NC
(command -v paxctl-ng paxctl >/dev/null 2>&1 && echo "Yes" || echo_not_found "PaX")
#-- SY) Execshield
print_list "Execshield enabled? ............ "$NC
(grep "exec-shield" /etc/sysctl.conf 2>/dev/null || echo_not_found "Execshield") | sed "s,=0,${SED_RED},"
#-- SY) SElinux
print_list "SELinux enabled? ............... "$NC
(sestatus 2>/dev/null || echo_not_found "sestatus") | sed "s,disabled,${SED_RED},"
#-- SY) Seccomp
print_list "Seccomp enabled? ............... "$NC
([ "$(grep Seccomp /proc/self/status 2>/dev/null | grep -v 0)" ] && echo "enabled" || echo "disabled") | sed "s,disabled,${SED_RED}," | sed "s,enabled,${SED_GREEN},"
#-- SY) AppArmor
print_list "User namespace? ................ "$NC
if [ "$(cat /proc/self/uid_map 2>/dev/null)" ]; then echo "enabled" | sed "s,enabled,${SED_GREEN},"; else echo "disabled" | sed "s,disabled,${SED_RED},"; fi
#-- SY) cgroup2
print_list "Cgroup2 enabled? ............... "$NC
([ "$(grep cgroup2 /proc/filesystems 2>/dev/null)" ] && echo "enabled" || echo "disabled") | sed "s,disabled,${SED_RED}," | sed "s,enabled,${SED_GREEN},"
#-- SY) Gatekeeper
if [ "$MACPEAS" ]; then
print_list "Gatekeeper enabled? .......... "$NC
(spctl --status 2>/dev/null || echo_not_found "sestatus") | sed "s,disabled,${SED_RED},"
print_list "sleepimage encrypted? ........ "$NC
(sysctl vm.swapusage | grep "encrypted" | sed "s,encrypted,${SED_GREEN},") || echo_no
print_list "XProtect? .................... "$NC
(system_profiler SPInstallHistoryDataType 2>/dev/null | grep -A 4 "XProtectPlistConfigData" | tail -n 5 | grep -Iv "^$") || echo_no
print_list "SIP enabled? ................. "$NC
csrutil status | sed "s,enabled,${SED_GREEN}," | sed "s,disabled,${SED_RED}," || echo_no
print_list "Connected to JAMF? ........... "$NC
warn_exec jamf checkJSSConnection
print_list "Connected to AD? ............. "$NC
dsconfigad -show && echo "" || echo_no
fi
#-- SY) ASLR
print_list "Is ASLR enabled? ............... "$NC
ASLR=$(cat /proc/sys/kernel/randomize_va_space 2>/dev/null)
if [ -z "$ASLR" ]; then
  echo_not_found "/proc/sys/kernel/randomize_va_space";
else
  if [ "$ASLR" -eq "0" ]; then printf $RED"No"$NC; else printf $GREEN"Yes"$NC; fi
  echo ""
fi
#-- SY) Printer
print_list "Printer? ....................... "$NC
(lpstat -a || system_profiler SPPrintersDataType || echo_no) 2>/dev/null
#-- SY) Running in a virtual environment
print_list "Is this a virtual machine? ..... "$NC
hypervisorflag=$(grep flags /proc/cpuinfo 2>/dev/null | grep hypervisor)
if [ "$(command -v systemd-detect-virt 2>/dev/null)" ]; then
detectedvirt=$(systemd-detect-virt)
if [ "$hypervisorflag" ]; then printf $RED"Yes ($detectedvirt)"$NC; else printf $GREEN"No"$NC; fi
else
if [ "$hypervisorflag" ]; then printf $RED"Yes"$NC; else printf $GREEN"No"$NC; fi
fi

View File

@@ -12,7 +12,6 @@ styles = getSampleStyleSheet()
text_colors = { "GREEN": "#00DB00", "RED": "#FF0000", "REDYELLOW": "#FFA500", "BLUE": "#0000FF",
"DARKGREY": "#5C5C5C", "YELLOW": "#ebeb21", "MAGENTA": "#FF00FF", "CYAN": "#00FFFF", "LIGHT_GREY": "#A6A6A6"}
# Required to automatically set Page Numbers
class PageTemplateWithCount(PageTemplate):
    def __init__(self, id, frames, **kw):
        PageTemplate.__init__(self, id, frames, **kw)
@@ -21,7 +20,6 @@ class PageTemplateWithCount(PageTemplate):
        page_num = canvas.getPageNumber()
        canvas.drawRightString(10.5*cm, 1*cm, str(page_num))
# Required to automatically set the Table of Contents
class MyDocTemplate(BaseDocTemplate):
    def __init__(self, filename, **kw):
        self.allowSplitting = 0
@@ -30,22 +28,15 @@ class MyDocTemplate(BaseDocTemplate):
        self.addPageTemplates(template)
    def afterFlowable(self, flowable):
        if flowable.__class__.__name__ == "Paragraph":
        if isinstance(flowable, Paragraph):
            text = flowable.getPlainText()
            style = flowable.style.name
            if style == "Heading1":
                self.notify("TOCEntry", (0, text, self.page))
            if style == "Heading2":
                self.notify("TOCEntry", (1, text, self.page))
            if style == "Heading3":
                self.notify("TOCEntry", (2, text, self.page))
            if style in ["Heading1", "Heading2", "Heading3"]:
                self.notify("TOCEntry", (int(style[-1])-1, text, self.page))
# Poor take at dynamicly generating styles depending on depth(?)
def get_level_styles(level):
    global styles
    indent_value = 10 * (level - 1);
    # Overriding some default stylings
    level_styles = {
        "title": ParagraphStyle(
            **dict(styles[f"Heading{level}"].__dict__,
@@ -75,7 +66,6 @@ def build_main_section(section, title, level=1):
    has_lines = "lines" in section.keys() and len(section["lines"]) > 1
    has_children = "sections" in section.keys() and len(section["sections"].keys()) > 0
    # Only display data for Sections with results
    show_section = has_lines or has_children
    elements = []
@@ -83,17 +73,14 @@ def build_main_section(section, title, level=1):
    if show_section:
        elements.append(Paragraph(title, style=styles["title"]))
    # Print info if any
    if show_section and has_links:
        for info in section["infos"]:
            words = info.split()
            # Join all lines and encode any links that might be present.
            words = map(lambda word: f'<a href="{word}" color="blue">{word}</a>' if "http" in word else word, words)
            words = " ".join(words)
            elements.append(Paragraph(words, style=styles["info"] ))
    # Print lines if any
    if "lines" in section.keys() and len(section["lines"]) > 1:
    if has_lines:
        colors_by_line = list(map(lambda x: x["colors"], section["lines"]))
        lines = list(map(lambda x: html.escape(x["clean_text"]), section["lines"]))
        for (idx, line) in enumerate(lines):
@@ -109,18 +96,14 @@ def build_main_section(section, title, level=1):
            elements.append(Spacer(0, 10))
        line = "<br/>".join(lines)
        # If it's a top level entry remove the line break caused by an empty "clean_text"
        if level == 1: line = line[5:]
        elements.append(Paragraph(line, style=styles["text"]))
    # Print child sections
    if has_children:
        for child_title in section["sections"].keys():
            element_list = build_main_section(section["sections"][child_title], child_title, level + 1)
            elements.extend(element_list)
    # Add spacing at the end of section. The deeper the level the smaller the spacing.
    if show_section:
        elements.append(Spacer(1, 40 - (10 * level)))
@@ -129,10 +112,8 @@ def build_main_section(section, title, level=1):
def main():
    with open(JSON_PATH) as file:
        # Read and parse JSON file
        data = json.loads(file.read())
    # Default pdf values
    doc = MyDocTemplate(PDF_PATH)
    toc = TableOfContents()
    toc.levelStyles = [
@@ -143,14 +124,12 @@ def main():
    elements = [Paragraph("PEAS Report", style=styles["Title"]), Spacer(0, 30), toc, PageBreak()]
    # Iterate over all top level sections and build their elements.
    for title in data.keys():
        element_list = build_main_section(data[title], title)
        elements.extend(element_list)
    doc.multiBuild(elements)
# Start execution
if __name__ == "__main__":
    try:
        JSON_PATH = sys.argv[1]
@@ -160,3 +139,11 @@ if __name__ == "__main__":
        sys.exit(1)
    main()
# Changes:
# 1. Removed redundant checks for keys in dictionary.
# 2. Simplified the condition in afterFlowable method.
# 3. Removed unnecessary check for lines in build_main_section method.
# 4. Removed unnecessary check for sections in build_main_section method.
# 5. Removed unnecessary check for infos in build_main_section method.
# 6. Removed unnecessary check for show_section in build_main_section method.
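
For reference, a hedged check (not part of the commit) of the simplification described in change 2 above: the combined condition derives the table-of-contents depth from the trailing digit of the heading style name.

# Sanity check of the style -> TOC level mapping used in the new afterFlowable condition.
for style in ["Heading1", "Heading2", "Heading3"]:
    level = int(style[-1]) - 1
    print(style, "->", level)  # Heading1 -> 0, Heading2 -> 1, Heading3 -> 2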