astroNOT

Premium

  • Posts: 58
  • Joined
  • Last visited
  • Days Won: 1
  • Feedback: 0%

astroNOT last won the day on June 5, 2021

astroNOT had the most liked content!


Reputation: 184

  1. import logging import math import re import shutil from datetime import datetime from termcolor import colored logging.basicConfig(level=logging.INFO, format='%(message)s') logger = logging.getLogger() def replace_group_names(input_file, output_file): with open(input_file, 'r', encoding='utf-8', errors='replace') as file: content = file.read() # Find all group names, including those with Korean characters and any other content group_matches = re.findall(r'Group\t([^\n{]+)', content) # Create a mapping of old names to new generic names name_map = {name.strip(): f'Group_{i + 1}' for i, name in enumerate(group_matches)} # Replace old names with new generic names for old_name, new_name in name_map.items(): content = re.sub(rf'Group\t{re.escape(old_name)}(?=\s*\{{)', f'Group\t{new_name}', content, flags=re.MULTILINE) with open(output_file, 'w', encoding='utf-8') as file: file.write(content) print(f"Processed {len(name_map)} group names.") print(f"Modified content written to {output_file}") def create_backup(file_path): timestamp = datetime.now().strftime("%Y%m%d_%H%M%S") backup_path = f"{file_path}.{timestamp}.bak" shutil.copy2(file_path, backup_path) logger.info(colored(f"Backup created: {backup_path}", "yellow")) def find_groups(content): pattern = r'Group\s+(\S+)\s*\{(.*?)\}' return re.finditer(pattern, content, re.DOTALL) def modify_mob_items(input_file, output_file, action, item_data=None, allow_duplicates=False): with open(input_file, 'r', encoding='utf-8', errors='replace') as file: content = file.read() groups = find_groups(content) remove_all = action == 'remove' and any(m['mob'] == 'all' for item_id in item_data for m in item_data[item_id]) for group_match in groups: group_name = group_match.group(1) group_content = group_match.group(2) lines = group_content.strip().split('\n') header_lines = [line for line in lines if not re.match(r'\s*\d+\s+\d+\s+\d+\s+\d+', line)] item_lines = [line for line in lines if re.match(r'\s*\d+\s+\d+\s+\d+\s+\d+', line)] modified = False if action == 'remove_duplicates': # Create a dictionary to store items with their highest chance item_chances = {} for line in item_lines: parts = line.split() item_id = parts[1] chance = float(parts[3]) quantity = int(parts[2]) if item_id in item_chances: if chance > item_chances[item_id]['chance']: item_chances[item_id] = {'chance': chance, 'quantity': quantity, 'line': line} else: item_chances[item_id] = {'chance': chance, 'quantity': quantity, 'line': line} # Check if we found any duplicates if len(item_chances) < len(item_lines): modified = True # Keep only the items with highest chances item_lines = [item_data['line'] for item_data in item_chances.values()] logger.info(colored( f"Group {group_name}: Removed duplicates, keeping highest chances", "green" )) if action == 'remove': for item_id in item_data: if remove_all or any(m['mob'] == 'all' for m in item_data[item_id]): original_count = len(item_lines) item_lines = [line for line in item_lines if item_id != line.split()[1]] if len(item_lines) < original_count: logger.info(colored(f"Group {group_name}: Removed item {item_id} (all condition)", "green")) modified = True else: mob_match = re.search(r'Mob\s+(\d+)', group_content) if mob_match: mob_id = mob_match.group(1) mob_data = next((m for m in item_data[item_id] if m['mob'] == mob_id), None) if mob_data: original_count = len(item_lines) item_lines = [line for line in item_lines if item_id != line.split()[1]] if len(item_lines) < original_count: logger.info( colored(f"Group {group_name}: Removed item {item_id} 
from mob {mob_id}", "green")) modified = True elif action == 'add': mob_match = re.search(r'Mob\s+(\d+)', group_content) if mob_match: mob_id = mob_match.group(1) for item_id in item_data: mob_data = next((m for m in item_data[item_id] if m['mob'] == mob_id or m['mob'] == 'all'), None) if mob_data: # Find the maximum chance in the current group max_chance = max(float(line.split()[-1]) for line in item_lines) if item_lines else 400 # Calculate new chance as a percentage of the max chance new_chance = math.ceil((mob_data['chance'] / 100) * max_chance) existing_items = [line for line in item_lines if item_id in line.split()] if existing_items and not allow_duplicates: for existing_item in existing_items: parts = existing_item.split() if mob_data.get('override', True): parts[2] = str(mob_data['quantity']) else: parts[2] = str(int(parts[2]) + mob_data['quantity']) parts[3] = str(new_chance) item_lines = [line if line != existing_item else '\t'.join(parts) for line in item_lines] logger.info(colored( f"Group {group_name}: Updated item {item_id} for mob {mob_id}, new quantity: {parts[2]}, new chance: {new_chance}", "green")) else: new_index = len(item_lines) + 1 new_item = f"\t{new_index}\t{item_id}\t{mob_data['quantity']}\t{new_chance}" item_lines.append(new_item) logger.info(colored( f"Group {group_name}: Added item {item_id} to mob {mob_id} with index {new_index}, chance {new_chance} (target: {mob_data['chance']}% of {max_chance})", "green")) modified = True if modified: # Renumber items item_lines = [ f"\t{i + 1}\t" + '\t'.join(line.split()[1:]) for i, line in enumerate(item_lines) ] # Reconstruct group content new_group_content = '\n'.join(header_lines + item_lines) content = content.replace(group_match.group(0), f"Group\t{group_name}\n{{\n{new_group_content}\n}}") with open(output_file, 'w', encoding='utf-8') as file: file.write(content) logger.info(colored(f"Modified content written to {output_file}", "green")) if __name__ == "__main__": input_file = 'mob_drop_item.txt' output_file = 'mob_drop_item.txt' create_backup(input_file) # Replace Korean group names # replace_group_names(input_file, output_file) # for mob in range(8001, 8010): # item_data_50514 = { # "50514": [{"mob": str(mob), "chance": 110, "quantity": 1}] # } # modify_mob_items(output_file, output_file, item_data_50514, 'add') # item_data_50515 = { # "50515": [{"mob": str(mob), "chance": 17, "quantity": 1}] # } # modify_mob_items(output_file, output_file, item_data_50515, 'add') # # Loop for mobs 8010 to 8027, adding 50514, 50515, and 50525 separately # for mob in range(8009, 8028): # item_data_50514 = { # "50514": [{"mob": str(mob), "chance": 15, "quantity": 1}] # } # modify_mob_items(output_file, output_file, item_data_50514, 'add') # item_data_50515 = { # "50515": [{"mob": str(mob), "chance": 17, "quantity": 1}] # } # modify_mob_items(output_file, output_file, item_data_50515, 'add') # item_data_50525 = { # "50525": [{"mob": str(mob), "chance": 90, "quantity": 1}] # } # modify_mob_items(output_file, output_file, item_data_50525, 'add') # item_data = { # "50525": [{"mob": str(mob), "chance": 0, "quantity": 1}] # } # modify_mob_items(output_file, output_file, item_data, 'add') # To remove items from specific mobs # item_data_remove = {"50300": [{"mob": "8007", "chance": 0, "quantity": 0}]} # modify_mob_items(input_file, output_file, item_data_remove, 'remove') # To remove items from all groups # item_data_remove_all = {"50300": [{"mob": "all", "chance": 0, "quantity": 0}]} # modify_mob_items(input_file, output_file, 
item_data_remove_all, 'remove') # List of items to remove from all mobs items_to_remove = [ 27003, 27005, 27006, 27002, 30022, 30046, 8003, 376, 396, 1226, 2246, 3266, 5216, 6166, 7346, 21326, 21346, 21366, 21386, 21406, 27621, 27622, 27623, 27624, 30000, 30001, 30002, 30003, 30004, 30005, 30006, 30007, 30008, 30009, 30010, 30011, 30012, 30013, 30014, 30015, 30016, 30017, 30018, 30019, 30020, 30021, 30022, 30023, 30024, 30025, 30026, 30027, 30028, 30029, 30030, 30031, 30032, 30033, 30034, 30035, 30036, 30037, 30038, 30039, 30040, 30041, 30042, 30043, 30044, 30045, 30046, 30047, 30048, 30049, 30050, 30051, 30052, 30053, 30054, 30055, 30056, 30057, 30058, 30059, 30060, 30061, 30062, 30063, 30065, 30066, 30067, 30068, 30069, 30070, 30071, 30072, 30073, 30074, 30075, 30076, 30077, 30078, 30079, 30080, 30081, 30082, 30083, 30084, 30085, 30086, 30087, 30088, 30089, 30090, 30091, 30092, 30101, 30102, 30103, 30104, 30105, 30106, 30107, 30108, 30109, 30110, 30111, 30112, 30113, 30114, 30115, 30116, 30117, 30118, 30119, 30120, 30121, 30122, 30123, 30124, 30125, 30126, 30127, 30128, 30129, 30130, 30131, 30132, 30133, 30134, 30135, 30136, 30137, 30138, 30139, 30140, 30141, 30142, 30143, 30144, 30145, 30146, 30147, 30148, 30149, 30150, 30151, 30152, 30153, 30154, 30155, 30156, 30157, 30158, 30159, 30160, 30161, 30162, 30163, 30164, 30165, 30166, 30167, 30168, 30169, 30170, 30171, 30172, 30173, 30174, 30175, 30176, 30177, 30178, 30179, 30180, 30183, 30184, 30185, 30186, 30187, 30188, 30189, 30190, 30191, 30192, 30193, 30194, 30195, 30196, 30197, 30198, 30199, 30203, 30204, 30205, 30210, 30211, 30212, 30213, 30214, 30215, 30216, 30217, 30218, 30219, 30220, 30221, 30222, 30223, 30224, 30225, 30226, 30227, 30228, 30251, 30252, 30253, 30254, 30265, 30266, 30267, 30268, 30269, 30500, 30501, 30502, 30503, 30504, 30505, 30506, 30507, 30508, 30509, 30510, 30511, 30512, 30513, 30514, 30515, 30516, 30517, 30518, 30519, 30520, 30521, 30522, 30523, 30524, 30525, 30526, 30527, 30528, 30529, 30530, 30531, 30532, 30533, 30534, 30535, 30536, 30537, 30538, 30539, 30540, 30541, 30542, 30543, 30544, 30545, 30546, 30547, 30548, 30549, 30550, 30551, 30552, 30553, 30554, 30555, 30556, 30557, 30558, 30559, 30560, 30561, 30562, 30563, 30564, 30565, 30566, 30567, 30568, 30569, 30570, 30571, 30572, 30573, 30574, 30575, 30576, 30577, 30578, 30579, 30580, 30581, 30582, 30583, 30584, 30585, 30586, 30600, 30601, 30602, 30603, 30604, 30605, 30606, 30607, 30608, 30609, 30610, 30611, 30612, 30613, 30614, 30615, 30616, 30617, 30618, 30619, 30620, 30621, 30622, 30623, 30624, 30625, 30626, 30627, 30628, 30629, 30630, 30639, 30640, 30641, 30642, 30643, 30644, 30645, 30646, 30647, 30648, 30649, 30650, 30651, 30652, 30653, 30654, 30655, 30656, 30657, 30658, 30659, 30660, 30661, 30662, 30663, 30664, 30666, 30667, 30668, 31001, 31002, 31003, 31004, 31006, 31007, 31009, 31010, 31011, 31012, 31013, 31014, 31015, 31016, 31017, 31018, 31019, 31020, 31021, 31022, 31023, 31024, 31025, 31026, 31027, 31028, 31029, 31030, 31031, 31033, 31034, 31035, 31036, 31037, 31038, 31039, 31040, 31041, 31042, 31043, 31044, 31045, 31046, 31047, 31048, 31049, 31050, 31051, 31052, 31053, 31054, 31055, 31056, 31057, 31058, 31059, 31060, 31061, 31062, 31063, 31064, 31065, 31066, 31067, 31068, 31069, 31070, 31071, 31072, 31074, 31075, 31076, 31077, 31079, 31080, 31081, 31082, 31083, 31084, 31085, 31086, 31087, 31088, 31089, 31090, 31092, 31093, 31094, 31095, 31101, 31102, 31103, 31108, 31109, 31110, 31111, 31112, 31113, 31114, 31115, 31117, 31118, 31120, 31121, 31122, 
31123, 31124, 31125, 31126, 31127, 31128, 31129, 31130, 31131, 31132, 31133, 31134, 31135, 31136, 31137, 31138, 31145, 31146, 31147, 31149, 31160, 31161, 31180, 31181, 31196, 31197, 31198, 31199, 31200, 31201, 31202, 31203, 31204, 31205, 31206, 31207, 31208, 31209, 31210, 31211, 31212, 31213, 33029, 33030, 33031, 35000, 35001, 39070, 39071, 39072, 39073, 39074, 39075, 39076, 39077, 39078, 39079, 39080, 39085, 39086, 39087, 39088, 39089, 39090, 39091, 39092, 39093, 39094, 39095, 39096, 39097, 39098, 39099, 39100, 39101, 39102, 39103, 39104, 39105, 39106, 39107, 39108, 39110, 39111, 39112, 39117, 50116, 50701, 50702, 50703, 50704, 50705, 50706, 50707, 50708, 50709, 50710, 50711, 50712, 50721, 50722, 50723, 50724, 50725, 50726, 50727, 50728, 50729, 50730, 50731, 50732, 50733, 50734, 50735, 50736, 50737, 50738, 50739, 50740, 50741, 50926, 55003, 55004, 55005, 55006, 55007, 56013, 71095, 71122, 71123, 71129, 71130, 71201, 72342, 76029, 100300, 100400, 100500, 100700, 100701, 30022, 27002, 27003, 27004, 27005, 27006, 27007, 8001, 8002, 8003, 8004, 8005, 30046, 30045, ] # modify_mob_items(input_file, output_file, action='remove_duplicates') # items_to_remove = [50006, 50007, 50008, 50009, 30022, 27100, 27101, 27103, 27104, 27110, 2711] # # # Create the item_wdata_remove_all dictionary # item_data_remove_all = {str(item): [{"mob": "all", "chance": 0, "quantity": 0}] for item in items_to_remove} # # Call the modify_mob_items function # modify_mob_items(input_file, output_file, item_data_remove_all, 'remove') # for mob in range(8019, 8028): # item_data_50514 = { # "27992": [{"mob": str(mob), "chance": 55, "quantity": 1}] # } # modify_mob_items(output_file, output_file, item_data_50514, 'add') # item_data_50514 = { # "27993": [{"mob": str(mob), "chance": 55, "quantity": 1}] # } # modify_mob_items(output_file, output_file, item_data_50514, 'add') # item_data_50514 = { # "27994": [{"mob": str(mob), "chance": 55, "quantity": 1}] # } # modify_mob_items(output_file, output_file, item_data_50514, 'add') # 2191 turtle # 1304 tiger # 1901 fox # 6431 Chief Orc # 1091 Demon King # 1092 Proud Demon King # 1093 Death Reaper # 2091 Queen Spider # 6433 Mighty Ice Witch # 6434 General Huashin bosses = [2191, 1304, 1901, 1091, 6431, 1092, 1093, 2091, 6433, 6434] bosses = [2191] for mob in bosses: item_data_50514 = { "27992": [{"mob": str(mob), "chance": 55, "quantity": 1}] } modify_mob_items(output_file, output_file, item_data_50514, 'add') # item_data_50514 = { # "27993": [{"mob": str(mob), "chance": 55, "quantity": 1}] # } # modify_mob_items(output_file, output_file, item_data_50514, 'add') # item_data_50514 = { # "27994": [{"mob": str(mob), "chance": 55, "quantity": 1}] # } # modify_mob_items(output_file, output_file, item_data_50514, 'add') UPDATE: Now removes group if empty after removing all items from it Also it can be sefely used directly on bsd, in case japanese char appears just deal with them individually, had one item randomly have a japanese name, did not investigate tho Update2: Removes duplicates (all of them), for eg same item 2 dropchances New logic for chance calculation: Goes infinite so u can add 200% chance for eg 100% chance is determined by max drop chance present in a group For eg: item 1 20 item 2 30 100% drop is 30 And based on that 30 the desired percentage is determined, thus 200% means 60 Will proly add a 400 cap (max drop is 400) but not atm
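     As a quick sanity check of the chance logic described in the update notes above, here is a small worked example that mirrors the max_chance / new_chance lines in modify_mob_items (the values 20 and 30 are the ones used in the note itself):

         import math

         # the highest chance already present in a group acts as the "100%" reference
         group_chances = [20, 30]          # e.g. item 1 -> 20, item 2 -> 30
         max_chance = max(group_chances)   # 30, i.e. what the script treats as 100%

         for pct in (50, 100, 200):
             new_chance = math.ceil((pct / 100) * max_chance)
             print(f"{pct}% of {max_chance} -> chance {new_chance}")

         # 50% of 30 -> chance 15
         # 100% of 30 -> chance 30
         # 200% of 30 -> chance 60   (no 400 cap is applied yet, as noted)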
  2. Hello. I want a way to spawn some mobs, or run a set of instructions, every day at a fixed time (13:00, for example). Can somebody help me accomplish this? You can also send offers here.
  3. In uiinventory.py, search for:

     elif constInfo.ENABLE_SELF_STACK_SCROLLS and srcItemVID in (71052, 71051, 71084, 71085):

     and add your item's vnum to the tuple.
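     A minimal sketch of the modified line, assuming 99999 is the vnum of the new self-stacking item (a placeholder, use your own vnum):

         # uiinventory.py (client-side Python): extend the self-stack whitelist
         # 99999 is a hypothetical vnum, replace it with your item's vnum
         elif constInfo.ENABLE_SELF_STACK_SCROLLS and srcItemVID in (71052, 71051, 71084, 71085, 99999):
             ...  # the existing body of this branch stays unchanged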
  4. Hello! I'm trying to add a new item to the game that behaves similarly to existing items of type USE_SPECIAL, such as 71051. I've created a new item in the proto (let's say 71053) with the same characteristics as 71051, but when I try to use it, the server sends packets with a different header to the client and the item doesn't work correctly. I've checked the proto and it seems to be correctly defined. Is there something specific I need to do so the server correctly recognizes this new item and handles it properly? On the server side I've added a new case for 71053 in char_item.cpp, in the UseItemEx function, but my case doesn't seem to be reached at all. Additionally, I've noticed that for the working item (71051) the server receives a packet with header 60, while for the new item (71053) it sends a packet with header 13. Any advice or hints are appreciated. Thank you!

     Proto for 71051 (working item):
     71051 재가비서 ITEM_USE USE_SPECIAL 1 ANTI_DROP | ANTI_SELL | ANTI_GIVE | ANTI_PKDROP | ANTI_MYSHOP ITEM_STACKABLE | LOG NONE NONE 0 0 0 0 0 LIMIT_NONE 0 LIMIT_NONE 0 APPLY_NONE 0 APPLY_NONE 0 APPLY_NONE 0 0 0 0 0 0 0 0 0 0

     Proto for 71053 (not working):
     71053 Diff name ITEM_USE USE_SPECIAL 1 ANTI_DROP | ANTI_SELL | ANTI_GIVE | ANTI_PKDROP | ANTI_MYSHOP ITEM_STACKABLE | LOG NONE NONE 0 0 0 0 0 LIMIT_NONE 0 LIMIT_NONE 0 APPLY_NONE 0 APPLY_NONE 0 APPLY_NONE 0 0 0 0 0 0 0 0 0 0

     Help a fellow brother out?
  5. Update 999: it seems that after adding 3 bonuses to an item it is successfully inserted into the DB, but the client instantly crashes (with no syserr) whenever an item with the 3rd bonus is present on the character. Do you have any advice on where I should register them on the client side so they become visible? I'm expecting some range on the Python side or something like that, but I'm not sure where. Fixed the unprocessed packets too; it seems having NULL attrs in the DB is not OK. Fixed the client crash as well: I had to increase ITEM_ATTRIBUTE_SLOT_RARE_NUM on the client side.
  6. So I'm trying to add 5 extra bonuses (6/7, but till 10) But with a catch too First 3 bonuses should be added from item_attr_rare table, last 2 from item_attr_rare_pvp (so i added a new table, duplicate from item attr rare) Then I proceeded to increase the item bonus count and limits: enum EItemMisc { ITEM_NAME_MAX_LEN = 24, ITEM_VALUES_MAX_NUM = 6, ITEM_SMALL_DESCR_MAX_LEN = 256, ITEM_LIMIT_MAX_NUM = 2, ITEM_APPLY_MAX_NUM = 3, ITEM_SOCKET_MAX_NUM = 3, ITEM_MAX_COUNT = 200, ITEM_ATTRIBUTE_NORM_NUM = 5, ITEM_ATTRIBUTE_RARE_NUM = 3, #ifdef BETTER_67 ITEM_ATTRIBUTE_RARE_PVP_NUM = 2, #endif ITEM_ATTRIBUTE_NORM_START = 0, ITEM_ATTRIBUTE_NORM_END = ITEM_ATTRIBUTE_NORM_START + ITEM_ATTRIBUTE_NORM_NUM, ITEM_ATTRIBUTE_RARE_START = ITEM_ATTRIBUTE_NORM_END, ITEM_ATTRIBUTE_RARE_END = ITEM_ATTRIBUTE_RARE_START + ITEM_ATTRIBUTE_RARE_NUM, #ifdef BETTER_67 ITEM_ATTRIBUTE_RARE_PVP_START = ITEM_ATTRIBUTE_RARE_END, #endif #ifdef BETTER_67 ITEM_ATTRIBUTE_RARE_PVP_END = ITEM_ATTRIBUTE_RARE_PVP_START + ITEM_ATTRIBUTE_RARE_PVP_NUM, ITEM_ATTRIBUTE_MAX_NUM = ITEM_ATTRIBUTE_RARE_PVP_END, // 7 #else ITEM_ATTRIBUTE_MAX_NUM = ITEM_ATTRIBUTE_RARE_END, // 7 #endif ITEM_ATTRIBUTE_MAX_LEVEL = 5, ITEM_AWARD_WHY_MAX_LEN = 50, REFINE_MATERIAL_MAX_NUM = 5, ITEM_ELK_VNUM = 50026, }; Created a new vector to store these new bonuses: std::vector<TItemAttrTable> m_vec_itemRarePvpTable; // New vector to store item_attr_rare_pvp data And now i read from the db, and for each row create a TItemAttrTable object, which i then add to the above vector: #ifdef BETTER_67 bool CClientManager::InitializeItemRarePvpTable() { char query[4096]; snprintf(query, sizeof(query), "SELECT apply, apply+0, prob, lv1, lv2, lv3, lv4, lv5, weapon, body, wrist, foots, neck, head, shield, ear " #ifdef ENABLE_ITEM_ATTR_COSTUME ", costume_body, costume_hair" #if defined(ENABLE_ITEM_ATTR_COSTUME) && defined(ENABLE_WEAPON_COSTUME_SYSTEM) ", costume_weapon" #endif #endif #ifdef ENABLE_PENDANT_SYSTEM ", pendant" #endif #ifdef ENABLE_GLOVE_SYSTEM ", glove" #endif " FROM item_attr_rare_pvp%s ORDER BY apply", GetTablePostfix()); auto pkMsg(CDBManager::instance().DirectQuery(query)); SQLResult *pRes = pkMsg->Get(); if (!pRes->uiNumRows) { sys_err("no result from item_attr_rare_pvp"); return false; } if (!m_vec_itemRarePvpTable.empty()) { sys_log(0, "RELOAD: item_attr_rare_pvp"); m_vec_itemRarePvpTable.clear(); } m_vec_itemRarePvpTable.reserve(pRes->uiNumRows); MYSQL_ROW data; while ((data = mysql_fetch_row(pRes->pSQLResult))) { TItemAttrTable t{}; int col = 0; strlcpy(t.szApply, data[col++], sizeof(t.szApply)); str_to_number(t.dwApplyIndex, data[col++]); str_to_number(t.dwProb, data[col++]); str_to_number(t.lValues[0], data[col++]); str_to_number(t.lValues[1], data[col++]); str_to_number(t.lValues[2], data[col++]); str_to_number(t.lValues[3], data[col++]); str_to_number(t.lValues[4], data[col++]); str_to_number(t.bMaxLevelBySet[ATTRIBUTE_SET_WEAPON], data[col++]); str_to_number(t.bMaxLevelBySet[ATTRIBUTE_SET_BODY], data[col++]); str_to_number(t.bMaxLevelBySet[ATTRIBUTE_SET_WRIST], data[col++]); str_to_number(t.bMaxLevelBySet[ATTRIBUTE_SET_FOOTS], data[col++]); str_to_number(t.bMaxLevelBySet[ATTRIBUTE_SET_NECK], data[col++]); str_to_number(t.bMaxLevelBySet[ATTRIBUTE_SET_HEAD], data[col++]); str_to_number(t.bMaxLevelBySet[ATTRIBUTE_SET_SHIELD], data[col++]); str_to_number(t.bMaxLevelBySet[ATTRIBUTE_SET_EAR], data[col++]); #ifdef ENABLE_ITEM_ATTR_COSTUME str_to_number(t.bMaxLevelBySet[ATTRIBUTE_SET_COSTUME_BODY], data[col++]); 
str_to_number(t.bMaxLevelBySet[ATTRIBUTE_SET_COSTUME_HAIR], data[col++]); #if defined(ENABLE_ITEM_ATTR_COSTUME) && defined(ENABLE_WEAPON_COSTUME_SYSTEM) str_to_number(t.bMaxLevelBySet[ATTRIBUTE_SET_COSTUME_WEAPON], data[col++]); #endif #endif #ifdef ENABLE_PENDANT_SYSTEM str_to_number(t.bMaxLevelBySet[ATTRIBUTE_SET_PENDANT], data[col++]); #endif #ifdef ENABLE_GLOVE_SYSTEM str_to_number(t.bMaxLevelBySet[ATTRIBUTE_SET_GLOVE], data[col++]); #endif sys_log(0, "ITEM_RARE_PVP: %-20s %4lu { %3d %3d %3d %3d %3d } { %d %d %d %d %d %d %d" #ifdef ENABLE_ITEM_ATTR_COSTUME " %d %d" #if defined(ENABLE_ITEM_ATTR_COSTUME) && defined(ENABLE_WEAPON_COSTUME_SYSTEM) " %d" #endif #endif #ifdef ENABLE_PENDANT_SYSTEM " %d" #endif #ifdef ENABLE_GLOVE_SYSTEM " %d" #endif " }", t.szApply, t.dwProb, t.lValues[0], t.lValues[1], t.lValues[2], t.lValues[3], t.lValues[4], t.bMaxLevelBySet[ATTRIBUTE_SET_WEAPON], t.bMaxLevelBySet[ATTRIBUTE_SET_BODY], t.bMaxLevelBySet[ATTRIBUTE_SET_WRIST], t.bMaxLevelBySet[ATTRIBUTE_SET_FOOTS], t.bMaxLevelBySet[ATTRIBUTE_SET_NECK], t.bMaxLevelBySet[ATTRIBUTE_SET_HEAD], t.bMaxLevelBySet[ATTRIBUTE_SET_SHIELD], t.bMaxLevelBySet[ATTRIBUTE_SET_EAR] #ifdef ENABLE_ITEM_ATTR_COSTUME , t.bMaxLevelBySet[ATTRIBUTE_SET_COSTUME_BODY], t.bMaxLevelBySet[ATTRIBUTE_SET_COSTUME_HAIR] #if defined(ENABLE_ITEM_ATTR_COSTUME) && defined(ENABLE_WEAPON_COSTUME_SYSTEM) , t.bMaxLevelBySet[ATTRIBUTE_SET_COSTUME_WEAPON] #endif #endif #ifdef ENABLE_PENDANT_SYSTEM , t.bMaxLevelBySet[ATTRIBUTE_SET_PENDANT] #endif #ifdef ENABLE_GLOVE_SYSTEM , t.bMaxLevelBySet[ATTRIBUTE_SET_GLOVE] #endif ); m_vec_itemRarePvpTable.emplace_back(t); } return true; } #endif // BETTER_67 Initialization seem to pass all ok #ifdef BETTER_67 // Initialize the new table if (!InitializeItemRarePvpTable()) { sys_err("InitializeItemRarePvpTable FAILED"); return false; } else { sys_err("Succesfully initialized ItemRarePvpTable"); } #endif The syserr log is temporary, just so I am convinced it worked, or atleast returned true, which it does But the problem is, now it seems that it fails to load items, db core dump, displayed using bt only: (gdb) #0 thr_kill () at thr_kill.S:4 #1 0x20a1fa8b in __raise (s=6) at /usr/src/lib/libc/gen/raise.c:52 #2 0x20ac9beb in abort () at /usr/src/lib/libc/stdlib/abort.c:67 #3 0x0042ab87 in emergency_sig (sig=11) at Main.cpp:53 #4 0x209760b9 in handle_signal (actp=<optimized out>, sig=<optimized out>, info=0xffffd1c0, ucp=0xffffcf00) at /usr/src/lib/libthr/thread/thr_sig.c:303 #5 0x20975660 in thr_sighandler (sig=11, info=0xffffd1c0, _ucp=0xffffcf00) at /usr/src/lib/libthr/thread/thr_sig.c:246 #6 <signal handler called> #7 0x004428c4 in str_to_number (out=@0x2299a531: 0, in=0x30323033 <error: Cannot access memory at address 0x30323033>) at ./../../common/utils.h:48 #8 CreateItemTableFromRes (res=0x22fa7240, pVec=0x4ab978 <CClientManager::RESULT_ITEM_LOAD(CPeer*, st_mysql_res*, unsigned int, unsigned int)::s_items>, dwPID=17) at ClientManagerPlayer.cpp:64 #9 0x00444177 in CClientManager::RESULT_ITEM_LOAD (this=0xffffd410, peer=0x20bef600, pRes=0x22fa7240, dwHandle=4, dwPID=17) at ClientManagerPlayer.cpp:692 #10 0x00443f4c in CClientManager::RESULT_COMPOSITE_PLAYER (this=0xffffd410, peer=0x20bef600, pMsg=0x20bc4570, dwQID=1) at ClientManagerPlayer.cpp:587 #11 0x00431e4e in CClientManager::AnalyzeQueryResult (this=0xffffd410, msg=0x20bc4570) at ClientManager.cpp:2887 #12 0x004318b0 in CClientManager::MainLoop (this=0xffffd410) at ClientManager.cpp:314 #13 0x0042ac99 in main () at Main.cpp:84 I changed nothing in 
`RESULT_ITEM_LOAD` or any other place, double checked in git pr view, no accidents there neither, I assume changing the bonus range has other several cascading effects and necessary changes? If so, could anybody point me in the right direction? Update, here, fixed the core crash, but I guess something didn't go as planned, altered item table, added the other columns for the new attrs, but somewhere i fucked up bcs now, inventory dosen't work, items are duplicated and alot of other things broke For cache i changed the fush method a bit void CItemCache::OnFlush() { if (m_data.vnum == 0) { char szQuery[QUERY_MAX_LEN]; snprintf(szQuery, sizeof(szQuery), "DELETE FROM item%s WHERE id=%u", GetTablePostfix(), m_data.id); CDBManager::instance().ReturnQuery(szQuery, QID_ITEM_DESTROY, 0, NULL); if (g_test_server) sys_log(0, "ItemCache::Flush : DELETE %u %s", m_data.id, szQuery); } else { TPlayerItem *p = &m_data; const auto setQuery = fmt::format(FMT_COMPILE("id={}, owner_id={}, `window`={}, pos={}, count={}, vnum={}, socket0={}, socket1={}, socket2={}, " "attrtype0={}, attrvalue0={}, " "attrtype1={}, attrvalue1={}, " "attrtype2={}, attrvalue2={}, " "attrtype3={}, attrvalue3={}, " "attrtype4={}, attrvalue4={}, " "attrtype5={}, attrvalue5={}, " "attrtype6={}, attrvalue6={} " #ifdef BETTER_67 ", attrtype7={}, attrvalue7={}, " "attrtype8={}, attrvalue8={}, " "attrtype9={}, attrvalue9={} " #endif ), p->id, p->owner, p->window, p->pos, p->count, p->vnum, p->alSockets[0], p->alSockets[1], p->alSockets[2], p->aAttr[0].bType, p->aAttr[0].sValue, p->aAttr[1].bType, p->aAttr[1].sValue, p->aAttr[2].bType, p->aAttr[2].sValue, p->aAttr[3].bType, p->aAttr[3].sValue, p->aAttr[4].bType, p->aAttr[4].sValue, p->aAttr[5].bType, p->aAttr[5].sValue, p->aAttr[6].bType, p->aAttr[6].sValue #ifdef BETTER_67 , p->aAttr[7].bType, p->aAttr[7].sValue, p->aAttr[8].bType, p->aAttr[8].sValue, p->aAttr[9].bType, p->aAttr[9].sValue #endif ); // @fixme205 const auto itemQuery = fmt::format(FMT_COMPILE("INSERT INTO item{} SET {} ON DUPLICATE KEY UPDATE {}"), GetTablePostfix(), setQuery, setQuery); if (g_test_server) sys_log(0, "ItemCache::Flush :REPLACE (%s)", itemQuery.c_str()); CDBManager::instance().ReturnQuery(itemQuery.c_str(), QID_ITEM_SAVE, 0, NULL); ++g_item_count; } m_bNeedQuery = false; } Is there any other place i gotta change caching? What basically happens now, client seems to not understand anything anymore, chat dosent work after the things start bugging out But hopefully i think i understand i m missing something on client side too bcs there are some chat packets that are being raised after moving items around in the inv 0528 13:29:18383 :: Unprocessed packet header 137, state Game 0528 13:29:19900 :: Unprocessed packet header 73, state Game 0528 13:29:24570 :: Unknown packet header: 102, last: 61 124 [Hidden Content]
  7. So what I m trying to do is to add 5 extra special bonuses (basically 6/7 but up to 10) What I wanna do is: The first 3 special bonuses to be PVM Last 2 pvp What I really wanna do is to avoid creating a new ITEM_ATTRIBUTE_RARE_RARE and read from a whole different table and have to go through all the hustle Ideally I'd like to somehow have minimum changes to the existing 6/7 flow to be able to mark bonuses in the db somehow to the degree that Case 71051 adds 3 bonuses and only pvm from a list available only for case 71051 and Case 71052 adds 2 extra pvp fetched from a list available in 71052 The original base for 71051 is: case 71051 : { LPITEM item2; if (!IsValidItemPosition(DestCell) || !(item2 = GetInventoryItem(wDestCell))) return false; if (ITEM_COSTUME == item2->GetType()) // @fixme124 { ChatPacket(CHAT_TYPE_INFO, LC_TEXT("¼Ó¼ºÀ» º¯°æÇÒ ¼ö ¾ø´Â ¾ÆÀÌÅÛÀÔ´Ï´Ù.")); return false; } if (item2->IsExchanging() || item2->IsEquipped()) // @fixme114 return false; if (item2->GetAttributeSetIndex() == -1) { ChatPacket(CHAT_TYPE_INFO, LC_TEXT("¼Ó¼ºÀ» º¯°æÇÒ ¼ö ¾ø´Â ¾ÆÀÌÅÛÀÔ´Ï´Ù.")); return false; } #ifdef ENABLE_ITEM_RARE_ATTR_LEVEL_PCT if (item2->AddRareAttribute2()) #else if (item2->AddRareAttribute()) #endif { ChatPacket(CHAT_TYPE_INFO, LC_TEXT("¼º°øÀûÀ¸·Î ¼Ó¼ºÀÌ Ãß°¡ µÇ¾ú½À´Ï´Ù")); int iAddedIdx = item2->GetRareAttrCount() + 4; char buf[21]; snprintf(buf, sizeof(buf), "%u", item2->GetID()); LogManager::instance().ItemLog( GetPlayerID(), item2->GetAttributeType(iAddedIdx), item2->GetAttributeValue(iAddedIdx), item->GetID(), "ADD_RARE_ATTR", buf, GetDesc()->GetHostName(), item->GetOriginalVnum()); item->SetCount(item->GetCount() - 1); } else { ChatPacket(CHAT_TYPE_INFO, LC_TEXT("´õ ÀÌ»ó ÀÌ ¾ÆÀÌÅÛÀ¸·Î ¼Ó¼ºÀ» Ãß°¡ÇÒ ¼ö ¾ø½À´Ï´Ù")); } } break; The add rare attrs methods are: bool CItem::AddRareAttribute2(const int * aiAttrPercentTable) { int count = GetRareAttrCount(); if (count >= ITEM_ATTRIBUTE_RARE_NUM) return false; static const int aiItemAddAttributePercent[ITEM_ATTRIBUTE_MAX_LEVEL] = { 40, 50, 10, 0, 0 }; if (aiAttrPercentTable == NULL) aiAttrPercentTable = aiItemAddAttributePercent; if (GetRareAttrCount() < MAX_RARE_ATTR_NUM) PutRareAttribute(aiAttrPercentTable); return true; } void CItem::PutRareAttribute(const int * aiAttrPercentTable) { int iAttrLevelPercent = number(1, 100); int i; for (i = 0; i < ITEM_ATTRIBUTE_MAX_LEVEL; ++i) { if (iAttrLevelPercent <= aiAttrPercentTable[i]) break; iAttrLevelPercent -= aiAttrPercentTable[i]; } PutRareAttributeWithLevel(i + 1); } void CItem::PutRareAttributeWithLevel(BYTE bLevel) { int iAttributeSet = GetAttributeSetIndex(); if (iAttributeSet < 0) return; if (bLevel > ITEM_ATTRIBUTE_MAX_LEVEL) return; std::vector<int> avail; int total = 0; for (int i = 0; i < MAX_APPLY_NUM; ++i) { const TItemAttrTable & r = g_map_itemRare[i]; if (r.bMaxLevelBySet[iAttributeSet] && !HasRareAttr(i)) { avail.emplace_back(i); total += r.dwProb; } } unsigned int prob = number(1, total); int attr_idx = APPLY_NONE; for (DWORD i = 0; i < avail.size(); ++i) { const TItemAttrTable & r = g_map_itemRare[avail[i]]; if (prob <= r.dwProb) { attr_idx = avail[i]; break; } prob -= r.dwProb; } if (!attr_idx) { sys_err("Cannot put item rare attribute %d %d", iAttributeSet, bLevel); return; } const TItemAttrTable & r = g_map_itemRare[attr_idx]; if (bLevel > r.bMaxLevelBySet[iAttributeSet]) bLevel = r.bMaxLevelBySet[iAttributeSet]; AddRareAttr(attr_idx, bLevel); } void CItem::AddRareAttr(BYTE bApply, BYTE bLevel) { if (HasRareAttr(bApply)) return; if (bLevel <= 0) return; int i = 
ITEM_ATTRIBUTE_RARE_START + GetRareAttrCount(); if (i == ITEM_ATTRIBUTE_RARE_END) sys_err("item rare attribute overflow!"); else { const TItemAttrTable & r = g_map_itemRare[bApply]; long lVal = r.lValues[MIN(4, bLevel - 1)]; if (lVal) SetForceAttribute(i, bApply, lVal); } } Would the easiest way to do this be to just create a new table?
  8. Thank you so very much @Syreldar and @Mitachi! I, and probably the rest of the community, am very grateful for such a useful explanation!
  9. At times I'm getting this entry in db syserr: socket_accept: accept: Software caused connection abort (fd 22). Has anybody encountered this, and does anyone have a suggestion on how to pinpoint why it is happening? The server does not crash because of it, so no .core is generated.
  10. So let's say I have this mob drop group for 8027, the level 90 stone:

      Group 전운석
      {
          Mob     8027
          Type    drop
          1    151    1   3
          2    152    1   12
          3    153    1   8
          4    5091   1   5
          5    5092   1   15
          6    5093   1   10
          7    141    1   5
          8    142    1   15
          9    143    1   10
          10   3131   1   10
          11   3132   1   15
          12   3133   1   20
          13   1101   1   10
          14   1102   1   15
          15   1103   1   20
          16   7131   1   10
          17   7132   1   15
          18   7133   1   20
          19   2141   1   10
          20   2142   1   15
          21   2143   1   20
          22   11681  1   10
          23   11682  1   15
          24   11683  1   20
          25   11881  1   10
          26   11882  1   15
          27   11883  1   20
          28   11481  1   10
          29   11482  1   15
          30   11483  1   20
          31   11281  1   10
          32   11282  1   15
          33   11283  1   20
          34   17164  1   9
          35   17165  1   24
          36   17166  1   14
          37   11490  1   3
          38   11492  1   8
          39   11493  1   10
          40   11290  1   3
          41   11292  1   8
          42   11293  1   10
          43   11690  1   3
          44   11692  1   8
          45   11693  1   10
          46   11890  1   3
          47   11892  1   8
          48   11893  1   10
          49   27992  1   3
          50   27993  1   3
          51   27994  1   3
          52   14201  1   5
          53   14202  1   10
          54   14203  1   15
          55   16201  1   5
          56   16202  1   10
          57   16203  1   15
          58   17201  1   5
          59   17202  1   10
          60   17203  1   15
          61   15201  1   5
          62   15202  1   10
          63   15203  1   15
          64   13060  1   5
          65   13061  1   10
          66   13063  1   15
          67   13080  1   5
          68   13081  1   10
          69   13083  1   15
          70   13100  1   5
          71   13101  1   10
          72   13102  1   15
          73   13120  1   5
          74   13121  1   10
          75   13122  1   15
          76   17107  1   5
          77   17108  1   10
          78   17109  1   15
          79   101    1   100
      }

      With these drop settings it drops 1-2 items, sometimes none, very rarely 4-5 items, but almost never more. What's the logic here, how does this work, and why aren't all 79 items given every time the stone dies?
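      For what it's worth, the pattern described (usually 0-2 drops, occasionally a few more, never anywhere near 79) is exactly what independent per-row rolls produce. A minimal simulation, assuming each row is rolled independently and the last column is a chance out of 1000; that denominator and any rate multipliers are an assumption, the real scaling depends on the server source:

          import random
          from collections import Counter

          # the last column of every row in the group above, in order (79 rows)
          chances = ([3, 12, 8] + [5, 15, 10] * 2 + [10, 15, 20] * 8 + [9, 24, 14]
                     + [3, 8, 10] * 4 + [3, 3, 3] + [5, 10, 15] * 9 + [100])

          def drops_per_kill(denom=1000):
              # every row gets its own independent roll, so most rows fail on every kill
              return sum(random.randint(1, denom) <= c for c in chances)

          results = Counter(drops_per_kill() for _ in range(100_000))
          for n in sorted(results):
              print(f"{n} item(s): {results[n] / 100_000:.1%}")

      Under that assumption the expected number of drops per kill is sum(chances) / 1000, roughly 0.95 here, which is why 0-2 items is the normal outcome and all 79 at once is practically impossible.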
  11. The ransomware warning is just a false positive, I assume? Also:

      ************** Exception Text **************
      System.Exception: 0x33545844 texture compression not implemented.

      For this map: [Hidden Content]
  12. There's no LLM at the moment that can "remember" as many tokens as a full game source requires. Even with Gemini, a long context window only means it accepts a long input stream, not that it retains much of it, so we'll still have to wait. Anyhow, LLMs are definitely good; for me GPT was the best at C++, for example.
  13. Hello, here's an item_list generator for the lazy (or those with really tired eyes). It appends new item upgrades and their respective icon/texture locations at the end of the file. Requires Python 3.11.

      import re

      def generate_item_code(items):
          item_codes = []
          for item in items:
              icon_location = item["icon_location"]
              texture_location = item["texture_location"]
              vnums = item["vnums"]
              for item_id in vnums:
                  # each base vnum generates ten upgrade rows (+0 .. +9)
                  # that share the base vnum's icon and texture path
                  for i in range(10):
                      item_code = f"{item_id + i}\tWEAPON\t{icon_location.replace('*', str(item_id))}\t{texture_location.replace('*', str(item_id))}"
                      item_codes.append(item_code)
          return item_codes

      def write_to_txt(item_codes, file_path):
          with open(file_path, "r+") as file:
              existing_lines = file.readlines()
              for item_code in item_codes:
                  if item_code + "\n" in existing_lines:
                      item_vnum = re.split(r'\t+', item_code)[0]
                      print(f"SKIPPED - Item code {item_vnum} already exists in item list")
                  else:
                      # the read above left the file pointer at EOF, so this appends
                      file.write(item_code + "\n")
                      print(item_code)

      if __name__ == "__main__":
          # To use the location params: if your file needs a prefix (e.g. "0*.tga"),
          # put it before the *, together with your desired extension (.tga in my case).
          location_dict = [
              {
                  "icon_location": "icon/item/0*.tga",
                  "texture_location": "d:/ymir work/item/weapon/0*.gr2",
                  "vnums": [8190, 8290]
              }
          ]
          item_codes = generate_item_code(location_dict)
          txt_file_path = r"client_location\item_list.txt"
          write_to_txt(item_codes, txt_file_path)
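      For reference, this is what the example configuration above produces; each base vnum yields ten upgrade rows (+0 to +9) that all point at the base vnum's icon and .gr2 path:

          codes = generate_item_code([{
              "icon_location": "icon/item/0*.tga",
              "texture_location": "d:/ymir work/item/weapon/0*.gr2",
              "vnums": [8190],
          }])
          print(codes[0])  # 8190  WEAPON  icon/item/08190.tga  d:/ymir work/item/weapon/08190.gr2
          print(codes[9])  # 8199  WEAPON  icon/item/08190.tga  d:/ymir work/item/weapon/08190.gr2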
  14. Can confirm that adding any instruction prior to while idle, seems to not give enough time for the maps & mobs to load try { // Hardcoded values const DWORD dwVnum = 101; const int count = 10; const bool isAggressive = false; const int iMapIndex = 352; const int iMapX = 360; const int iMapY = 360; // Assume SECTREE_MANAGER and CHARACTER_MANAGER are properly initialized and available PIXEL_POSITION pos; if (!SECTREE_MANAGER::instance().GetMapBasePositionByMapIndex(iMapIndex, pos)) { sys_log(0, "PPPPPPQQQ Error: Cannot find base position in this map %d", iMapIndex); } const CMob *pMonster = CMobManager::instance().Get(dwVnum); if (pMonster == NULL) { sys_log(0, "PPPPPPQQQ Error: No mob data for VNUM %d", dwVnum); } size_t SpawnCount = 0; for (size_t i = 0; i < count; ++i) { LPCHARACTER pSpawnMonster = CHARACTER_MANAGER::instance().SpawnMobRange( dwVnum, iMapIndex, pos.x - number(5, 5) + (iMapX * 100), pos.y - number(5, 5) + (iMapY * 100), pos.x + number(5, 5) + (iMapX * 100), pos.y + number(5, 5) + (iMapY * 100), false, pMonster->m_table.bType == CHAR_TYPE_MONSTER, isAggressive); if (pSpawnMonster != NULL) { SpawnCount++; } } sys_log(0, "Spawned %u monsters successfully.", SpawnCount); } catch (const std::exception &e) { sys_log(0, "An exception occurred: %s", e.what()); } while (idle()) ; What it logs is: ./srv1/chan/ch1/core1/syslog:Apr 17 19:23:11 :: PPPPPPQQQ Error: Cannot find base position in this map 352 ./srv1/chan/ch1/core1/syslog:Apr 17 19:23:11 :: PPPPPPQQQ Error: No mob data for VNUM 101 Trial and error continues ?
  15. Hello! Lets say I have the follow scenario: At server startup: I want to spawn two mobs, as an example function(correctness does not matter) void SpawnMobs(DWORD mob_vnum, int count, int map_index, int x, int y) { for (int i = 0; i < count; ++i) { LPCHARACTER mob = CHARACTER_MANAGER::instance().SpawnMob(mob_vnum, map_index, x, y, 0, false, -1); if (!mob) sys_err("Failed to spawn mob VNUM %d on map %d at (%d, %d)", mob_vnum, map_index, x, y); } } And at a given date-time, to make the mobs move to a specific location(Again function correctness is probably wrong, but this is just a POCE) void MoveMobTo(DWORD mob_vid, int target_x, int target_y) { using namespace std::chrono; //April 18, 2024, at 18:00 std::tm scheduled_time = {}; scheduled_time.tm_year = 2024 scheduled_time.tm_mon = 4 - 1; scheduled_time.tm_mday = 18; scheduled_time.tm_hour = 18; scheduled_time.tm_min = 0; scheduled_time.tm_sec = 0; auto scheduled_time_t = std::mktime(&scheduled_time); system_clock::time_point scheduled_tp = system_clock::from_time_t(scheduled_time_t); // Get current time system_clock::time_point now = system_clock::now(); // Check if the current time matches the scheduled time if (now == scheduled_tp) { LPCHARACTER mob = CHARACTER_MANAGER::instance().Find(mob_vid); if (mob) { mob->Goto(target_x, target_y); sys_log(0, "Mob with VID %d moved to (%d, %d) as scheduled", mob_vid, target_x, target_y); } else { sys_err("Failed to find mob with VID %d to move", mob_vid); } } else { sys_log(0, "MoveMobTo called, but it is not the scheduled time yet."); } } I have two main questions 1. !Where should i be calling these functions within the source game, in main seems like it might not be a good idea 2. Checking time constantly until desired time is true, seems quite tricky, a while true surely is not good, if u have any basic suggestions it would be more then welcomed ? Basically my goal is to have mobs spawn, if not already spawned, move them at a specific hour to a exact spot on the map, make em fight each other, while fighting pc can't attack, after pc can attack the surviver, but not instantly, spawn another boss, keep it there for a specific time, if not dead, despawn it Realistically, if i'd know where to call my functions with the instructions and how to not use while loops to check for specific states, it would be enough ? Thank you in advance!