-scripts are converted to bytecode on export

-fix bug in doc where touchscreen events were not documented
Juan Linietsky 2014-02-25 09:31:47 -03:00
parent 06e358199f
commit b2ce682f6e
20 changed files with 957 additions and 336 deletions
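Taken together, the changes below add a binary token-stream format for GDScript (.gdc): the old tokenizer becomes an abstract GDTokenizer interface with a GDTokenizerText backend for source and a new GDTokenizerBuffer backend for the compiled form, GDScript gains a load_byte_code() path, the resource loader recognizes the .gdc extension, and an editor export plugin converts .gd files on export. A minimal round-trip sketch using the names introduced or touched by this commit (include paths and error codes are assumptions, not part of the diff):

// Compile GDScript source text to the binary token stream, write it as .gdc,
// then load it back through the new byte-code path. Sketch only.
#include "gd_tokenizer.h"   // assumed include paths
#include "gd_script.h"
#include "os/file_access.h"

Error export_and_reload(const String &p_src_path, const String &p_code) {

	// 1. Source -> obfuscated token buffer (same call the export plugin uses).
	Vector<uint8_t> buf = GDTokenizerBuffer::parse_code_string(p_code);
	ERR_FAIL_COND_V(buf.empty(), ERR_PARSE_ERROR);

	// 2. Write it next to the source with the new .gdc extension.
	String dst = p_src_path.basename() + ".gdc";
	FileAccess *fw = FileAccess::open(dst, FileAccess::WRITE);
	ERR_FAIL_COND_V(!fw, ERR_CANT_OPEN);
	fw->store_buffer(buf.ptr(), buf.size());
	memdelete(fw);

	// 3. Load it back; ResourceFormatLoaderGDScript now routes .gdc files
	//    through this same call (after setting the script path).
	Ref<GDScript> script = memnew( GDScript );
	return script->load_byte_code(dst);
}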

View File

@ -864,7 +864,7 @@ MainLoop* test(TestType p_test) {
if (p_test==TEST_TOKENIZER) {
GDTokenizer tk;
GDTokenizerText tk;
tk.set_code(code);
int line=-1;
while(tk.get_token()!=GDTokenizer::TK_EOF) {
@ -969,8 +969,16 @@ MainLoop* test(TestType p_test) {
} else if (p_test==TEST_BYTECODE) {
Vector<uint8_t> buf = GDTokenizerBuffer::parse_code_string(code);
String dst = test.basename()+".gdc";
FileAccess *fw = FileAccess::open(dst,FileAccess::WRITE);
fw->store_buffer(buf.ptr(),buf.size());
memdelete(fw);
}
#if 0
Parser parser;
Error err = parser.parse(code);

View File

@ -36,7 +36,8 @@ namespace TestGDScript {
enum TestType {
TEST_TOKENIZER,
TEST_PARSER,
TEST_COMPILER
TEST_COMPILER,
TEST_BYTECODE,
};
MainLoop* test(TestType p_type);

View File

@ -152,6 +152,11 @@ MainLoop* test_main(String p_test,const List<String>& p_args) {
return TestGDScript::test(TestGDScript::TEST_COMPILER);
}
if (p_test=="gd_bytecode") {
return TestGDScript::test(TestGDScript::TEST_BYTECODE);
}
if (p_test=="image") {
return TestImage::test();
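A note on trying this out: assuming the engine's legacy -test command-line switch dispatches to test_main() here, running the binary with -test gd_bytecode and a GDScript file as the final argument should exercise the TEST_BYTECODE branch above and write a .gdc file next to the source. The exact invocation is an assumption; only the "gd_bytecode" test name comes from the diff.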

View File

@ -192,6 +192,7 @@ RES ResourceLoader::load(const String &p_path,const String& p_type_hint,bool p_n
res->set_last_modified_time(mt);
}
#endif
print_line("LOADED: "+res->get_path());
return res;
}
@ -246,7 +247,7 @@ String ResourceLoader::find_complete_path(const String& p_path,const String& p_t
String path = local_path+E->get();
if (FileAccess::exists(path)) {
if (PathRemap::get_singleton()->has_remap(path) || FileAccess::exists(path)) {
candidates.push_back(path);
}

View File

@ -1319,7 +1319,7 @@ void Variant::set(const Variant& p_index, const Variant& p_value, bool *r_valid)
return;
int type=p_value;
if (type<0 || type>=6)
if (type<0 || type>=InputEvent::TYPE_MAX)
return; //fail
valid=true;
ie.type=InputEvent::Type(type);
@ -2765,6 +2765,7 @@ void Variant::get_property_list(List<PropertyInfo> *p_list) const {
InputEvent ie = operator InputEvent();
p_list->push_back( PropertyInfo(Variant::INT,"type"));
p_list->push_back( PropertyInfo(Variant::INT,"device"));
p_list->push_back( PropertyInfo(Variant::INT,"ID"));

View File

@ -142,6 +142,12 @@ public:
}
int find_nearest(const T& p_val) const {
bool exact;
return _find(p_val,exact);
}
_FORCE_INLINE_ int size() const { return _data.size(); }
_FORCE_INLINE_ bool empty() const { return _data.empty(); }

View File

@ -1365,6 +1365,10 @@ void Main::cleanup() {
OS::get_singleton()->_execpath="";
OS::get_singleton()->_local_clipboard="";
#ifdef TOOLS_ENABLED
EditorNode::unregister_editor_types();
#endif
unregister_driver_types();
unregister_module_types();
unregister_scene_types();

View File

@ -106,7 +106,7 @@ bool GDScriptLanguage::has_named_classes() const {
int GDScriptLanguage::find_function(const String& p_function,const String& p_code) const {
GDTokenizer tokenizer;
GDTokenizerText tokenizer;
tokenizer.set_code(p_code);
int indent=0;
while(tokenizer.get_token()!=GDTokenizer::TK_EOF && tokenizer.get_token()!=GDTokenizer::TK_ERROR) {

File diff suppressed because it is too large

View File

@ -343,10 +343,12 @@ public:
ProgramNode() { type=TYPE_PROGRAM; }
};
*/
private:
GDTokenizer tokenizer;
GDTokenizer *tokenizer;
Node *head;
@ -380,12 +382,15 @@ private:
void _parse_class(ClassNode *p_class);
bool _end_statement();
Error _parse(const String& p_base_path);
public:
String get_error() const;
int get_error_line() const;
int get_error_column() const;
Error parse(const String& p_code,const String& p_base_path="");
Error parse_bytecode(const Vector<uint8_t> &p_bytecode,const String& p_base_path="");
const Node *get_parse_tree() const;

View File

@ -1416,7 +1416,7 @@ Error GDScript::reload() {
String basedir=path;
if (basedir=="")
basedir==get_path();
basedir=get_path();
if (basedir!="")
basedir=basedir.get_base_dir();
@ -1560,6 +1560,49 @@ void GDScript::_bind_methods() {
}
Error GDScript::load_byte_code(const String& p_path) {
Vector<uint8_t> bytecode = FileAccess::get_file_as_array(p_path);
ERR_FAIL_COND_V(bytecode.size()==0,ERR_PARSE_ERROR);
path=p_path;
String basedir=path;
if (basedir=="")
basedir=get_path();
if (basedir!="")
basedir=basedir.get_base_dir();
valid=false;
GDParser parser;
Error err = parser.parse_bytecode(bytecode,basedir);
if (err) {
_err_print_error("GDScript::load_byte_code",path.empty()?"built-in":(const char*)path.utf8().get_data(),parser.get_error_line(),("Parse Error: "+parser.get_error()).utf8().get_data());
ERR_FAIL_V(ERR_PARSE_ERROR);
}
GDCompiler compiler;
err = compiler.compile(&parser,this);
if (err) {
_err_print_error("GDScript::load_byte_code",path.empty()?"built-in":(const char*)path.utf8().get_data(),compiler.get_error_line(),("Compile Error: "+compiler.get_error()).utf8().get_data());
ERR_FAIL_V(ERR_COMPILATION_FAILED);
}
valid=true;
for(Map<StringName,Ref<GDScript> >::Element *E=subclasses.front();E;E=E->next()) {
_set_subclass_path(E->get(),path);
}
return OK;
}
Error GDScript::load_source_code(const String& p_path) {
@ -2153,6 +2196,19 @@ RES ResourceFormatLoaderGDScript::load(const String &p_path,const String& p_orig
Ref<GDScript> scriptres(script);
if (p_path.ends_with(".gdc")) {
script->set_script_path(p_original_path); // script needs this.
script->set_path(p_original_path);
Error err = script->load_byte_code(p_path);
if (err!=OK) {
ERR_FAIL_COND_V(err!=OK, RES());
}
} else {
Error err = script->load_source_code(p_path);
if (err!=OK) {
@ -2165,12 +2221,14 @@ RES ResourceFormatLoaderGDScript::load(const String &p_path,const String& p_orig
//script->set_name(p_path.get_file());
script->reload();
}
return scriptres;
}
void ResourceFormatLoaderGDScript::get_recognized_extensions(List<String> *p_extensions) const {
p_extensions->push_back("gd");
p_extensions->push_back("gdc");
}
bool ResourceFormatLoaderGDScript::handles_type(const String& p_type) const {
@ -2180,7 +2238,8 @@ bool ResourceFormatLoaderGDScript::handles_type(const String& p_type) const {
String ResourceFormatLoaderGDScript::get_resource_type(const String &p_path) const {
if (p_path.extension().to_lower()=="gd")
String el = p_path.extension().to_lower();
if (el=="gd" || el=="gdc")
return "GDScript";
return "";
}

View File

@ -261,6 +261,7 @@ public:
virtual String get_node_type() const;
void set_script_path(const String& p_path) { path=p_path; } //because subclasses need a path too...
Error load_source_code(const String& p_path);
Error load_byte_code(const String& p_path);
virtual ScriptLanguage *get_language() const;

View File

@ -29,6 +29,9 @@
#include "gd_tokenizer.h"
#include "print_string.h"
#include "gd_functions.h"
#include "io/marshalls.h"
#include "map.h"
const char* GDTokenizer::token_names[TK_MAX]={
"Empty",
"Identifier",
@ -128,7 +131,7 @@ static bool _is_hex(CharType c) {
return (c>='0' && c<='9') || (c>='a' && c<='f') || (c>='A' && c<='F');
}
void GDTokenizer::_make_token(Token p_type) {
void GDTokenizerText::_make_token(Token p_type) {
TokenData &tk=tk_rb[tk_rb_pos];
@ -138,7 +141,7 @@ void GDTokenizer::_make_token(Token p_type) {
tk_rb_pos=(tk_rb_pos+1)%TK_RB_SIZE;
}
void GDTokenizer::_make_identifier(const StringName& p_identifier) {
void GDTokenizerText::_make_identifier(const StringName& p_identifier) {
TokenData &tk=tk_rb[tk_rb_pos];
@ -151,7 +154,7 @@ void GDTokenizer::_make_identifier(const StringName& p_identifier) {
}
void GDTokenizer::_make_built_in_func(GDFunctions::Function p_func) {
void GDTokenizerText::_make_built_in_func(GDFunctions::Function p_func) {
TokenData &tk=tk_rb[tk_rb_pos];
@ -163,7 +166,7 @@ void GDTokenizer::_make_built_in_func(GDFunctions::Function p_func) {
tk_rb_pos=(tk_rb_pos+1)%TK_RB_SIZE;
}
void GDTokenizer::_make_constant(const Variant& p_constant) {
void GDTokenizerText::_make_constant(const Variant& p_constant) {
TokenData &tk=tk_rb[tk_rb_pos];
@ -176,7 +179,7 @@ void GDTokenizer::_make_constant(const Variant& p_constant) {
}
void GDTokenizer::_make_type(const Variant::Type& p_type) {
void GDTokenizerText::_make_type(const Variant::Type& p_type) {
TokenData &tk=tk_rb[tk_rb_pos];
@ -191,7 +194,7 @@ void GDTokenizer::_make_type(const Variant::Type& p_type) {
}
void GDTokenizer::_make_error(const String& p_error) {
void GDTokenizerText::_make_error(const String& p_error) {
error_flag=true;
last_error=p_error;
@ -206,7 +209,7 @@ void GDTokenizer::_make_error(const String& p_error) {
}
void GDTokenizer::_make_newline(int p_spaces) {
void GDTokenizerText::_make_newline(int p_spaces) {
TokenData &tk=tk_rb[tk_rb_pos];
tk.type=TK_NEWLINE;
@ -216,7 +219,7 @@ void GDTokenizer::_make_newline(int p_spaces) {
tk_rb_pos=(tk_rb_pos+1)%TK_RB_SIZE;
}
void GDTokenizer::_advance() {
void GDTokenizerText::_advance() {
if (error_flag) {
//parser broke
@ -859,7 +862,7 @@ void GDTokenizer::_advance() {
}
void GDTokenizer::set_code(const String& p_code) {
void GDTokenizerText::set_code(const String& p_code) {
code=p_code;
len = p_code.length();
@ -878,7 +881,7 @@ void GDTokenizer::set_code(const String& p_code) {
_advance();
}
GDTokenizer::Token GDTokenizer::get_token(int p_offset) const {
GDTokenizerText::Token GDTokenizerText::get_token(int p_offset) const {
ERR_FAIL_COND_V( p_offset <= -MAX_LOOKAHEAD, TK_ERROR);
ERR_FAIL_COND_V( p_offset >= MAX_LOOKAHEAD, TK_ERROR);
@ -886,7 +889,7 @@ GDTokenizer::Token GDTokenizer::get_token(int p_offset) const {
return tk_rb[ofs].type;
}
int GDTokenizer::get_token_line(int p_offset) const {
int GDTokenizerText::get_token_line(int p_offset) const {
ERR_FAIL_COND_V( p_offset <= -MAX_LOOKAHEAD, -1);
ERR_FAIL_COND_V( p_offset >= MAX_LOOKAHEAD, -1);
@ -894,7 +897,7 @@ int GDTokenizer::get_token_line(int p_offset) const {
return tk_rb[ofs].line;
}
int GDTokenizer::get_token_column(int p_offset) const {
int GDTokenizerText::get_token_column(int p_offset) const {
ERR_FAIL_COND_V( p_offset <= -MAX_LOOKAHEAD, -1);
ERR_FAIL_COND_V( p_offset >= MAX_LOOKAHEAD, -1);
@ -902,7 +905,7 @@ int GDTokenizer::get_token_column(int p_offset) const {
return tk_rb[ofs].col;
}
const Variant& GDTokenizer::get_token_constant(int p_offset) const {
const Variant& GDTokenizerText::get_token_constant(int p_offset) const {
ERR_FAIL_COND_V( p_offset <= -MAX_LOOKAHEAD, tk_rb[0].constant);
ERR_FAIL_COND_V( p_offset >= MAX_LOOKAHEAD, tk_rb[0].constant);
@ -910,7 +913,7 @@ const Variant& GDTokenizer::get_token_constant(int p_offset) const {
ERR_FAIL_COND_V(tk_rb[ofs].type!=TK_CONSTANT,tk_rb[0].constant);
return tk_rb[ofs].constant;
}
StringName GDTokenizer::get_token_identifier(int p_offset) const {
StringName GDTokenizerText::get_token_identifier(int p_offset) const {
ERR_FAIL_COND_V( p_offset <= -MAX_LOOKAHEAD, StringName());
ERR_FAIL_COND_V( p_offset >= MAX_LOOKAHEAD, StringName());
@ -921,7 +924,7 @@ StringName GDTokenizer::get_token_identifier(int p_offset) const {
}
GDFunctions::Function GDTokenizer::get_token_built_in_func(int p_offset) const {
GDFunctions::Function GDTokenizerText::get_token_built_in_func(int p_offset) const {
ERR_FAIL_COND_V( p_offset <= -MAX_LOOKAHEAD, GDFunctions::FUNC_MAX);
ERR_FAIL_COND_V( p_offset >= MAX_LOOKAHEAD, GDFunctions::FUNC_MAX);
@ -932,7 +935,7 @@ GDFunctions::Function GDTokenizer::get_token_built_in_func(int p_offset) const {
}
Variant::Type GDTokenizer::get_token_type(int p_offset) const {
Variant::Type GDTokenizerText::get_token_type(int p_offset) const {
ERR_FAIL_COND_V( p_offset <= -MAX_LOOKAHEAD, Variant::NIL);
ERR_FAIL_COND_V( p_offset >= MAX_LOOKAHEAD, Variant::NIL);
@ -944,7 +947,7 @@ Variant::Type GDTokenizer::get_token_type(int p_offset) const {
}
int GDTokenizer::get_token_line_indent(int p_offset) const {
int GDTokenizerText::get_token_line_indent(int p_offset) const {
ERR_FAIL_COND_V( p_offset <= -MAX_LOOKAHEAD, 0);
ERR_FAIL_COND_V( p_offset >= MAX_LOOKAHEAD, 0);
@ -955,7 +958,7 @@ int GDTokenizer::get_token_line_indent(int p_offset) const {
}
String GDTokenizer::get_token_error(int p_offset) const {
String GDTokenizerText::get_token_error(int p_offset) const {
ERR_FAIL_COND_V( p_offset <= -MAX_LOOKAHEAD, String());
ERR_FAIL_COND_V( p_offset >= MAX_LOOKAHEAD, String());
@ -965,9 +968,377 @@ String GDTokenizer::get_token_error(int p_offset) const {
return tk_rb[ofs].constant;
}
void GDTokenizer::advance(int p_amount) {
void GDTokenizerText::advance(int p_amount) {
ERR_FAIL_COND( p_amount <=0 );
for(int i=0;i<p_amount;i++)
_advance();
}
//////////////////////////////////////////////////////////////////////////////////////////////////////
#define BYTECODE_VERSION 1
Error GDTokenizerBuffer::set_code_buffer(const Vector<uint8_t> & p_buffer) {
const uint8_t *buf=p_buffer.ptr();
int total_len=p_buffer.size();
ERR_FAIL_COND_V( p_buffer.size()<24 || p_buffer[0]!='G' || p_buffer[1]!='D' || p_buffer[2]!='S' || p_buffer[3]!='C',ERR_INVALID_DATA);
int version = decode_uint32(&buf[4]);
if (version>1) {
ERR_EXPLAIN("Bytecode is too New!");
ERR_FAIL_COND_V(version>BYTECODE_VERSION,ERR_INVALID_DATA);
}
int identifier_count = decode_uint32(&buf[8]);
int constant_count = decode_uint32(&buf[12]);
int line_count = decode_uint32(&buf[16]);
int token_count = decode_uint32(&buf[20]);
const uint8_t *b=buf;
b=&buf[24];
total_len-=24;
identifiers.resize(identifier_count);
for(int i=0;i<identifier_count;i++) {
int len = decode_uint32(b);
ERR_FAIL_COND_V(len>total_len,ERR_INVALID_DATA);
b+=4;
Vector<uint8_t> cs;
cs.resize(len);
for(int j=0;j<len;j++) {
cs[j]=b[j]^0xb6;
}
cs[cs.size()-1]=0;
String s;
s.parse_utf8((const char*)cs.ptr());
b+=len;
total_len-=len+4;
identifiers[i]=s;
}
constants.resize(constant_count);
for(int i=0;i<constant_count;i++) {
Variant v;
int len;
Error err = decode_variant(v,b,total_len,&len);
if (err)
return err;
b+=len;
total_len-=len;
constants[i]=v;
}
ERR_FAIL_COND_V(line_count*8>total_len,ERR_INVALID_DATA);
for(int i=0;i<line_count;i++) {
uint32_t token=decode_uint32(b);
b+=4;
uint32_t linecol=decode_uint32(b);
b+=4;
lines.insert(token,linecol);
total_len-=8;
}
tokens.resize(token_count);
for(int i=0;i<token_count;i++) {
ERR_FAIL_COND_V( total_len < 1, ERR_INVALID_DATA);
if ((*b)&TOKEN_BYTE_MASK) { //little endian always
ERR_FAIL_COND_V( total_len < 4, ERR_INVALID_DATA);
tokens[i]=decode_uint32(b)&~TOKEN_BYTE_MASK;
b+=4;
} else {
tokens[i]=*b;
b+=1;
total_len--;
}
}
token=0;
return OK;
}
Vector<uint8_t> GDTokenizerBuffer::parse_code_string(const String& p_code) {
Vector<uint8_t> buf;
Map<StringName,int> identifier_map;
HashMap<Variant,int,VariantHasher> constant_map;
Map<uint32_t,int> line_map;
Vector<uint32_t> token_array;
GDTokenizerText tt;
tt.set_code(p_code);
int line=-1;
int col=0;
while(true) {
if (tt.get_token_line()!=line) {
line=tt.get_token_line();
line_map[line]=token_array.size();
}
uint32_t token=tt.get_token();
switch(tt.get_token()) {
case TK_IDENTIFIER: {
StringName id = tt.get_token_identifier();
if (!identifier_map.has(id)) {
int idx = identifier_map.size();
identifier_map[id]=idx;
}
token|=identifier_map[id]<<TOKEN_BITS;
} break;
case TK_CONSTANT: {
Variant c = tt.get_token_constant();
if (!constant_map.has(c)) {
int idx = constant_map.size();
constant_map[c]=idx;
}
token|=constant_map[c]<<TOKEN_BITS;
} break;
case TK_BUILT_IN_TYPE: {
token|=tt.get_token_type()<<TOKEN_BITS;
} break;
case TK_BUILT_IN_FUNC: {
token|=tt.get_token_built_in_func()<<TOKEN_BITS;
} break;
case TK_NEWLINE: {
token|=tt.get_token_line_indent()<<TOKEN_BITS;
} break;
case TK_ERROR: {
ERR_FAIL_V(Vector<uint8_t>());
} break;
default: {}
};
token_array.push_back(token);
if (tt.get_token()==TK_EOF)
break;
tt.advance();
}
//reverse maps
Map<int,StringName> rev_identifier_map;
for(Map<StringName,int>::Element *E=identifier_map.front();E;E=E->next()) {
rev_identifier_map[E->get()]=E->key();
}
Map<int,Variant> rev_constant_map;
const Variant *K =NULL;
while((K=constant_map.next(K))) {
rev_constant_map[constant_map[*K]]=*K;
}
Map<int,uint32_t> rev_line_map;
for(Map<uint32_t,int>::Element *E=line_map.front();E;E=E->next()) {
rev_line_map[E->get()]=E->key();
}
//save header
buf.resize(24);
buf[0]='G';
buf[1]='D';
buf[2]='S';
buf[3]='C';
encode_uint32(BYTECODE_VERSION,&buf[4]);
encode_uint32(identifier_map.size(),&buf[8]);
encode_uint32(constant_map.size(),&buf[12]);
encode_uint32(line_map.size(),&buf[16]);
encode_uint32(token_array.size(),&buf[20]);
//save identifiers
for(Map<int,StringName>::Element *E=rev_identifier_map.front();E;E=E->next()) {
CharString cs = String(E->get()).utf8();
int len = cs.length()+1;
int extra = 4-(len%4);
if (extra==4)
extra=0;
uint8_t ibuf[4];
encode_uint32(len+extra,ibuf);
for(int i=0;i<4;i++) {
buf.push_back(ibuf[i]);
}
for(int i=0;i<len;i++) {
buf.push_back(cs[i]^0xb6);
}
for(int i=0;i<extra;i++) {
buf.push_back(0^0xb6);
}
}
for(Map<int,Variant>::Element *E=rev_constant_map.front();E;E=E->next()) {
int len;
Error err = encode_variant(E->get(),NULL,len);
ERR_FAIL_COND_V(err!=OK,Vector<uint8_t>());
int pos=buf.size();
buf.resize(pos+len);
encode_variant(E->get(),&buf[pos],len);
}
for(Map<int,uint32_t>::Element *E=rev_line_map.front();E;E=E->next()) {
uint8_t ibuf[8];
encode_uint32(E->key(),&ibuf[0]);
encode_uint32(E->get(),&ibuf[4]);
for(int i=0;i<8;i++)
buf.push_back(ibuf[i]);
}
for(int i=0;i<token_array.size();i++) {
uint32_t token = token_array[i];
if (token&~TOKEN_MASK) {
uint8_t buf4[4];
encode_uint32(token_array[i]|TOKEN_BYTE_MASK,&buf4[0]);
for(int j=0;j<4;j++) {
buf.push_back(buf4[j]);
}
} else {
buf.push_back(token);
}
}
return buf;
}
GDTokenizerBuffer::Token GDTokenizerBuffer::get_token(int p_offset) const {
int offset = token+p_offset;
if (offset<0 || offset>=tokens.size())
return TK_EOF;
return GDTokenizerBuffer::Token(tokens[offset]&TOKEN_MASK);
}
StringName GDTokenizerBuffer::get_token_identifier(int p_offset) const{
int offset = token+p_offset;
ERR_FAIL_INDEX_V(offset,tokens.size(),StringName());
uint32_t identifier = tokens[offset]>>TOKEN_BITS;
ERR_FAIL_INDEX_V(identifier,identifiers.size(),StringName());
return identifiers[identifier];
}
GDFunctions::Function GDTokenizerBuffer::get_token_built_in_func(int p_offset) const{
int offset = token+p_offset;
ERR_FAIL_INDEX_V(offset,tokens.size(),GDFunctions::FUNC_MAX);
return GDFunctions::Function(tokens[offset]>>TOKEN_BITS);
}
Variant::Type GDTokenizerBuffer::get_token_type(int p_offset) const{
int offset = token+p_offset;
ERR_FAIL_INDEX_V(offset,tokens.size(),Variant::NIL);
return Variant::Type(tokens[offset]>>TOKEN_BITS);
}
int GDTokenizerBuffer::get_token_line(int p_offset) const{
int offset = token+p_offset;
int pos = lines.find_nearest(offset);
if (pos<0)
return -1;
if (pos>=lines.size())
pos=lines.size()-1;
uint32_t l = lines.getv(pos);
return l&TOKEN_LINE_MASK;
}
int GDTokenizerBuffer::get_token_column(int p_offset) const{
int offset = token+p_offset;
int pos = lines.find_nearest(offset);
if (pos<0)
return -1;
if (pos>=lines.size())
pos=lines.size()-1;
uint32_t l = lines.getv(pos);
return l>>TOKEN_LINE_BITS;
}
int GDTokenizerBuffer::get_token_line_indent(int p_offset) const{
int offset = token+p_offset;
ERR_FAIL_INDEX_V(offset,tokens.size(),0);
return tokens[offset]>>TOKEN_BITS;
}
const Variant& GDTokenizerBuffer::get_token_constant(int p_offset) const{
int offset = token+p_offset;
ERR_FAIL_INDEX_V(offset,tokens.size(),nil);
uint32_t constant = tokens[offset]>>TOKEN_BITS;
ERR_FAIL_INDEX_V(constant,constants.size(),nil);
return constants[constant];
}
String GDTokenizerBuffer::get_token_error(int p_offset) const{
ERR_FAIL_V(String());
}
void GDTokenizerBuffer::advance(int p_amount){
ERR_FAIL_INDEX(p_amount+token,tokens.size());
token+=p_amount;
}
GDTokenizerBuffer::GDTokenizerBuffer(){
token=0;
}
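For reference, the .gdc layout that set_code_buffer() accepts and parse_code_string() produces, reconstructed from the code above (all integers little-endian; this describes the code in this commit, not a separate spec):

// .gdc / BYTECODE_VERSION 1 layout:
//   bytes  0..3    magic "GDSC"
//   bytes  4..7    format version (1)
//   bytes  8..11   identifier count
//   bytes 12..15   constant count
//   bytes 16..19   line-table entry count
//   bytes 20..23   token count
//   identifiers    per entry: uint32 length (padded up to a multiple of 4),
//                  then that many UTF-8 bytes, each XOR'ed with 0xb6
//   constants      each written with encode_variant()
//   line table     pairs of uint32: token index, then line number (the reader
//                  also treats bits above TOKEN_LINE_BITS as a column)
//   tokens         a token whose encoded value fits below TOKEN_BYTE_MASK is
//                  stored as a single byte; anything larger is a uint32 with
//                  TOKEN_BYTE_MASK (0x80) set. Bits above TOKEN_BITS carry the
//                  identifier/constant index, built-in type or function id,
//                  or the newline indent, depending on the token.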

View File

@ -33,6 +33,8 @@
#include "variant.h"
#include "string_db.h"
#include "gd_functions.h"
#include "vmap.h"
class GDTokenizer {
public:
@ -117,11 +119,28 @@ public:
TK_MAX
};
private:
protected:
static const char* token_names[TK_MAX];
public:
static const char *get_token_name(Token p_token);
virtual const Variant& get_token_constant(int p_offset=0) const=0;
virtual Token get_token(int p_offset=0) const=0;
virtual StringName get_token_identifier(int p_offset=0) const=0;
virtual GDFunctions::Function get_token_built_in_func(int p_offset=0) const=0;
virtual Variant::Type get_token_type(int p_offset=0) const=0;
virtual int get_token_line(int p_offset=0) const=0;
virtual int get_token_column(int p_offset=0) const=0;
virtual int get_token_line_indent(int p_offset=0) const=0;
virtual String get_token_error(int p_offset=0) const=0;
virtual void advance(int p_amount=1)=0;
virtual ~GDTokenizer(){};
};
class GDTokenizerText : public GDTokenizer {
enum {
MAX_LOOKAHEAD=4,
TK_RB_SIZE=MAX_LOOKAHEAD*2+1
@ -162,20 +181,59 @@ private:
void _advance();
public:
static const char *get_token_name(Token p_token);
void set_code(const String& p_code);
Token get_token(int p_offset=0) const;
const Variant& get_token_constant(int p_offset=0) const;
StringName get_token_identifier(int p_offset=0) const;
GDFunctions::Function get_token_built_in_func(int p_offset=0) const;
Variant::Type get_token_type(int p_offset=0) const;
int get_token_line(int p_offset=0) const;
int get_token_column(int p_offset=0) const;
int get_token_line_indent(int p_offset=0) const;
virtual Token get_token(int p_offset=0) const;
virtual StringName get_token_identifier(int p_offset=0) const;
virtual GDFunctions::Function get_token_built_in_func(int p_offset=0) const;
virtual Variant::Type get_token_type(int p_offset=0) const;
virtual int get_token_line(int p_offset=0) const;
virtual int get_token_column(int p_offset=0) const;
virtual int get_token_line_indent(int p_offset=0) const;
virtual const Variant& get_token_constant(int p_offset=0) const;
virtual String get_token_error(int p_offset=0) const;
virtual void advance(int p_amount=1);
};
String get_token_error(int p_offset=0) const;
void advance(int p_amount=1);
class GDTokenizerBuffer : public GDTokenizer {
enum {
TOKEN_BYTE_MASK=0x80,
TOKEN_BITS=8,
TOKEN_MASK=(1<<TOKEN_BITS)-1,
TOKEN_LINE_BITS=24,
TOKEN_LINE_MASK=(1<<TOKEN_LINE_BITS)-1,
};
Vector<StringName> identifiers;
Vector<Variant> constants;
VMap<uint32_t,uint32_t> lines;
Vector<uint32_t> tokens;
Variant nil;
int token;
public:
Error set_code_buffer(const Vector<uint8_t> & p_buffer);
static Vector<uint8_t> parse_code_string(const String& p_code);
virtual Token get_token(int p_offset=0) const;
virtual StringName get_token_identifier(int p_offset=0) const;
virtual GDFunctions::Function get_token_built_in_func(int p_offset=0) const;
virtual Variant::Type get_token_type(int p_offset=0) const;
virtual int get_token_line(int p_offset=0) const;
virtual int get_token_column(int p_offset=0) const;
virtual int get_token_line_indent(int p_offset=0) const;
virtual const Variant& get_token_constant(int p_offset=0) const;
virtual String get_token_error(int p_offset=0) const;
virtual void advance(int p_amount=1);
GDTokenizerBuffer();
};
#endif // TOKENIZER_H
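The header now splits the old monolithic tokenizer into an abstract GDTokenizer interface with two backends, so GDParser (which now holds a GDTokenizer*) can be fed either source text or a compiled buffer. A minimal sketch of driving both through the common interface (the helper name and includes are assumptions):

// Dump the token stream of a .gd source file or a compiled .gdc file through
// the shared GDTokenizer interface. Sketch only.
#include "gd_tokenizer.h"
#include "os/file_access.h"
#include "print_string.h"

static void dump_tokens(const String &p_path) {

	GDTokenizer *tk = NULL;

	if (p_path.extension().to_lower() == "gdc") {
		GDTokenizerBuffer *tb = memnew( GDTokenizerBuffer );
		tb->set_code_buffer(FileAccess::get_file_as_array(p_path));
		tk = tb;
	} else {
		Vector<uint8_t> src = FileAccess::get_file_as_array(p_path);
		String code;
		code.parse_utf8((const char*)src.ptr(), src.size());
		GDTokenizerText *tt = memnew( GDTokenizerText );
		tt->set_code(code);
		tk = tt;
	}

	// The same loop works for both backends.
	while (tk->get_token() != GDTokenizer::TK_EOF && tk->get_token() != GDTokenizer::TK_ERROR) {
		print_line(itos(tk->get_token_line()) + ": " + GDTokenizer::get_token_name(tk->get_token()));
		tk->advance();
	}

	memdelete(tk);
}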

View File

@ -13,11 +13,60 @@
#include "gd_script.h"
#include "io/resource_loader.h"
#include "os/file_access.h"
GDScriptLanguage *script_language_gd=NULL;
ResourceFormatLoaderGDScript *resource_loader_gd=NULL;
ResourceFormatSaverGDScript *resource_saver_gd=NULL;
#ifdef TOOLS_ENABLED
#include "tools/editor/editor_import_export.h"
#include "gd_tokenizer.h"
#include "tools/editor/editor_node.h"
class EditorExportGDScript : public EditorExportPlugin {
OBJ_TYPE(EditorExportGDScript,EditorExportPlugin);
public:
virtual Vector<uint8_t> custom_export(String& p_path,const Ref<EditorExportPlatform> &p_platform) {
//compile gdscript to bytecode
if (p_path.ends_with(".gd")) {
Vector<uint8_t> file = FileAccess::get_file_as_array(p_path);
if (file.empty())
return file;
String txt;
txt.parse_utf8((const char*)file.ptr(),file.size());
file = GDTokenizerBuffer::parse_code_string(txt);
if (!file.empty()) {
print_line("PREV: "+p_path);
p_path=p_path.basename()+".gdc";
print_line("NOW: "+p_path);
return file;
}
}
return Vector<uint8_t>();
}
EditorExportGDScript(){}
};
static void register_editor_plugin() {
Ref<EditorExportGDScript> egd = memnew( EditorExportGDScript );
EditorImportExport::get_singleton()->add_export_plugin(egd);
}
#endif
void register_gdscript_types() {
@ -30,6 +79,11 @@ void register_gdscript_types() {
resource_saver_gd=memnew( ResourceFormatSaverGDScript );
ResourceSaver::add_resource_format_saver(resource_saver_gd);
#ifdef TOOLS_ENABLED
EditorNode::add_init_callback(register_editor_plugin);
#endif
}
void unregister_gdscript_types() {
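One detail worth noting: the plugin cannot be registered directly from register_gdscript_types(), since modules register before the editor is created; the new EditorNode::add_init_callback() queues the registration, EditorNode runs the queued callbacks at the end of its constructor (see the editor_node changes below), and unregister_editor_types() clears the list on shutdown. Any module can hook exports the same way; a rough template with hypothetical names, sketch only:

#ifdef TOOLS_ENABLED
#include "tools/editor/editor_import_export.h"
#include "tools/editor/editor_node.h"

class MyExportPlugin : public EditorExportPlugin {
	OBJ_TYPE(MyExportPlugin, EditorExportPlugin);
public:
	virtual Vector<uint8_t> custom_export(String& p_path, const Ref<EditorExportPlatform> &p_platform) {
		// Return a non-empty buffer (optionally rewriting p_path) to replace the
		// exported file; return an empty Vector to leave the file untouched.
		return Vector<uint8_t>();
	}
};

static void _register_my_export_plugin() {
	Ref<MyExportPlugin> plugin = memnew( MyExportPlugin );
	EditorImportExport::get_singleton()->add_export_plugin(plugin);
}
#endif

void register_mymodule_types() {
#ifdef TOOLS_ENABLED
	// Defer until the editor is constructed and EditorImportExport exists.
	EditorNode::add_init_callback(_register_my_export_plugin);
#endif
}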

View File

@ -585,8 +585,8 @@ TileMap::TileMap() {
center_x=false;
center_y=false;
fp_adjust=0.1;
fp_adjust=0.1;
fp_adjust=0.4;
fp_adjust=0.4;
}
TileMap::~TileMap() {

View File

@ -3058,6 +3058,12 @@ void EditorNode::register_editor_types() {
// ObjectTypeDB::register_type<EditorPostImport>();
}
void EditorNode::unregister_editor_types() {
_init_callbacks.clear();
}
void EditorNode::stop_child_process() {
_menu_option_confirm(RUN_STOP,false);
@ -3193,6 +3199,7 @@ void EditorNode::_file_dialog_unregister(FileDialog *p_dialog){
singleton->file_dialogs.erase(p_dialog);
}
Vector<EditorNodeInitCallback> EditorNode::_init_callbacks;
Error EditorNode::export_platform(const String& p_platform, const String& p_path, bool p_debug,const String& p_password,bool p_quit_after) {
@ -4155,6 +4162,8 @@ EditorNode::EditorNode() {
// Ref<ImageTexture> it = gui_base->get_icon("logo","Icons");
// OS::get_singleton()->set_icon( it->get_data() );
for(int i=0;i<_init_callbacks.size();i++)
_init_callbacks[i]();
}

View File

@ -91,6 +91,9 @@
typedef void (*EditorNodeInitCallback)();
class EditorNode : public Node {
OBJ_TYPE( EditorNode, Node );
@ -393,6 +396,8 @@ class EditorNode : public Node {
static EditorNode *singleton;
static Vector<EditorNodeInitCallback> _init_callbacks;
protected:
void _notification(int p_what);
static void _bind_methods();
@ -463,6 +468,7 @@ public:
Error export_platform(const String& p_platform, const String& p_path, bool p_debug,const String& p_password,bool p_quit_after=false);
static void register_editor_types();
static void unregister_editor_types();
Control *get_gui_base() { return gui_base; }
@ -481,6 +487,8 @@ public:
~EditorNode();
void get_singleton(const char* arg1, bool arg2);
static void add_init_callback(EditorNodeInitCallback p_callback) { _init_callbacks.push_back(p_callback); }
};

View File

@ -413,7 +413,7 @@ void EditorSettings::_load_defaults() {
set("on_save/compress_binary_resources",true);
set("on_save/save_modified_external_resources",true);
set("on_save/save_paths_as_relative",false);
set("on_save/save_paths_without_extension",true);
set("on_save/save_paths_without_extension",false);
set("text_editor/create_signal_callbacks",true);

View File

@ -884,15 +884,20 @@ Ref<Font> EditorFontImportPlugin::generate_font(const Ref<ResourceImportMetadata
++missing;
};
};
printf("total %i/%i\n", missing, import_chars.size());
print_line("total_chars: "+itos(font_data_list.size()));
/* KERNING */
for(int i=0;i<font_data_list.size();i++) {
if (font_data_list[i]->character>512)
continue;
for(int j=0;j<font_data_list.size();j++) {
if (font_data_list[j]->character>512)
continue;
FT_Vector delta;
FT_Get_Kerning( face, font_data_list[i]->glyph,font_data_list[j]->glyph, FT_KERNING_DEFAULT, &delta );