public static void logErrorAPI(Throwable error, Class<?> classFile) {
    StringBuilder msg = new StringBuilder("API error! Please update your mods. Error: ");
    msg.append(error);
    StackTraceElement[] stackTrace = error.getStackTrace();
    if (stackTrace.length > 0) {
        msg.append(", ").append(stackTrace[0]);
    }
    logger.log(Level.ERROR, msg.toString());
    if (classFile != null) {
        msg.append("API error: ").append(classFile.getSimpleName()).append(" is loaded from ")
                .append(classFile.getProtectionDomain().getCodeSource().getLocation());
        logger.log(Level.ERROR, msg.toString());
    }
}
@Before
public void setUp() throws Exception {
    registry = RegistryService.getMetricRegistry();
    meters = new HashMap<>();
    meters.put("total", getMeter(APPENDS_BASE_NAME.submetric("total")));
    meters.put("trace", getMeter(APPENDS_BASE_NAME.withTags("level", "trace")));
    meters.put("debug", getMeter(APPENDS_BASE_NAME.withTags("level", "debug")));
    meters.put("info", getMeter(APPENDS_BASE_NAME.withTags("level", "info")));
    meters.put("warn", getMeter(APPENDS_BASE_NAME.withTags("level", "warn")));
    meters.put("error", getMeter(APPENDS_BASE_NAME.withTags("level", "error")));
    meters.put("fatal", getMeter(APPENDS_BASE_NAME.withTags("level", "fatal")));
    meters.put("throwCount", getMeter(THROWABLES_BASE_NAME.submetric("total")));
    meters.put("throw[RuntimeException]", getMeter(THROWABLES_BASE_NAME
            .withTags("class", RuntimeException.class.getName())));
    logger = LogManager.getLogger(Log4J2InstrumentationTest.class.getName());
    origLevel = logger.getLevel();
    setLogLevel(logger, Level.ALL);
}
public static void main(String[] args) {
    final LoggerContext ctx = (LoggerContext) LogManager.getContext(false);
    final Configuration config = ctx.getConfiguration();
    config.getLoggerConfig(strategy.Portfolio.class.getName()).setLevel(Level.WARN);
    ctx.updateLoggers(config);

    final CommonParam cp = ParamManager.getCommonParam("cu", TIME_FRAME.DAY, "19980101 000000", "20160916 170000");
    StrategyOptimizer so = new StrategyOptimizer(tester.RealStrategyTester.class);
    so.setInstrumentParam(cp.instrument, cp.tf);
    so.setTestDateRange((int) DateTimeHelper.Ldt2Long(cp.start_date), (int) DateTimeHelper.Ldt2Long(cp.end_date));

    int num = so.setStrategyParamRange(ChannelBreakStrategy.class, new Integer[]{4, 800, 2});
    System.out.println(num);
    so.StartOptimization();

    Set<Entry<Object[], Performances>> entryset = so.result_db.entrySet();
    for (Entry<Object[], Performances> entry : entryset) {
        for (Object obj : entry.getKey()) {
            System.out.print(obj + ",\t");
        }
        System.out.println("ProfitRatio: " + entry.getValue().ProfitRatio
                + "\tMaxDrawDown: " + entry.getValue().MaxDrawDown);
    }
}
@Override
public void execute(FunctionContext context) {
    Cache cache = CacheFactory.getAnyInstance();
    Map<String, String> result = new HashMap<String, String>();
    try {
        LogWriterLogger logwriterLogger = (LogWriterLogger) cache.getLogger();
        Object[] args = (Object[]) context.getArguments();
        final String logLevel = (String) args[0];
        Level log4jLevel = LogWriterLogger.logWriterNametoLog4jLevel(logLevel);
        logwriterLogger.setLevel(log4jLevel);
        System.setProperty(DistributionConfig.GEMFIRE_PREFIX + LOG_LEVEL, logLevel);
        // LOG:CONFIG: logger.info(LogMarker.CONFIG, "GFSH Changed log level to {}", log4jLevel);
        result.put(cache.getDistributedSystem().getDistributedMember().getId(),
                "New log level is " + log4jLevel);
        context.getResultSender().lastResult(result);
    } catch (Exception ex) {
        // LOG:CONFIG: logger.info(LogMarker.CONFIG, "GFSH Changing log level exception {}", ex.getMessage(), ex);
        result.put(cache.getDistributedSystem().getDistributedMember().getId(),
                "ChangeLogLevelFunction exception " + ex.getMessage());
        context.getResultSender().lastResult(result);
    }
}
public static Ticket requestPlayerTicket(Object mod, String player, World world, Type type) {
    ModContainer mc = getContainer(mod);
    if (mc == null) {
        FMLLog.log(Level.ERROR, "Failed to locate the container for mod instance %s (%s : %x)",
                mod, mod.getClass().getName(), System.identityHashCode(mod));
        return null;
    }
    if (playerTickets.get(player).size() > playerTicketLength) {
        FMLLog.warning("Unable to assign further chunkloading tickets to player %s (on behalf of mod %s)",
                player, mc.getModId());
        return null;
    }
    Ticket ticket = new Ticket(mc.getModId(), type, world, player);
    playerTickets.put(player, ticket);
    tickets.get(world).put("Forge", ticket);
    return ticket;
}
@Override
protected final void decode(ChannelHandlerContext ctx, FMLProxyPacket msg, List<Object> out) throws Exception {
    testMessageValidity(msg);
    ByteBuf payload = msg.payload().duplicate();
    if (payload.readableBytes() < 1) {
        FMLLog.log(Level.ERROR,
                "The FMLIndexedCodec has received an empty buffer on channel %s, likely a result of a LAN server issue. Pipeline parts : %s",
                ctx.channel().attr(NetworkRegistry.FML_CHANNEL), ctx.pipeline().toString());
    }
    byte discriminator = payload.readByte();
    Class<? extends A> clazz = discriminators.get(discriminator);
    if (clazz == null) {
        throw new NullPointerException("Undefined message for discriminator " + discriminator + " in channel " + msg.channel());
    }
    A newMsg = clazz.newInstance();
    ctx.attr(INBOUNDPACKETTRACKER).get().set(new WeakReference<FMLProxyPacket>(msg));
    decodeInto(ctx, payload.slice(), newMsg);
    out.add(newMsg);
}
/**
 * Looks up the logger in the logger factory, attempts to find the real logger instance
 * in the underlying logging framework, and updates its level.
 * This functionality is currently heavily dependent on the Log4j API.
 *
 * @param loggerName  the logger name
 * @param loggerLevel the logger level
 * @param additive    the additive nature of the logger
 * @param request     the request
 * @param response    the response
 * @throws Exception the exception
 */
@PostMapping(value = "/updateLoggerLevel")
@ResponseBody
public void updateLoggerLevel(@RequestParam final String loggerName,
                              @RequestParam final String loggerLevel,
                              @RequestParam(defaultValue = "false") final boolean additive,
                              final HttpServletRequest request,
                              final HttpServletResponse response) throws Exception {
    ensureEndpointAccessIsAuthorized(request, response);
    Assert.notNull(this.loggerContext);
    final Collection<LoggerConfig> loggerConfigs = getLoggerConfigurations();
    loggerConfigs.stream()
            .filter(cfg -> cfg.getName().equals(loggerName))
            .forEachOrdered(cfg -> {
                cfg.setLevel(Level.getLevel(loggerLevel));
                cfg.setAdditive(additive);
            });
    this.loggerContext.updateLoggers();
}
private void removeStone(World world, Random random, int x, int z) {
    int id = world.getBiomeGenForCoords(x, z).biomeID;
    Block block = Blocks.stone;
    if (id == 35 || id == 163 || id == 29 || id == 157 || id == 6 || id == 134 || id == 160 || id == 161 || id == 32 || id == 33) {
        block = Granite;
    } else if (id == 36 || id == 164 || id == 16 || id == 14 || id == 15 || id == 0 || id == 24 || id == 26) {
        block = Basalt;
    } else if (id == 2 || id == 1 || id == 7 || id == 129 || id == 5 || id == 30 || id == 11) {
        block = Limestone;
    } else if (id == 130 || id == 17 || id == 21 || id == 149 || id == 23 || id == 151 || id == 22 || id == 133 || id == 155 || id == 19 || id == 31 || id == 158 || id == 27) {
        block = Shale;
    } else if (id == 37 || id == 165 || id == 132 || id == 4 || id == 3 || id == 131 || id == 34 || id == 162 || id == 28 || id == 156 || id == 25) {
        block = Slate;
    } else if (id == 39 || id == 167 || id == 38 || id == 166 || id == 18 || id == 13 || id == 12 || id == 140) {
        block = Gneiss;
    } else {
        FMLLog.log(Level.ERROR, Technical.modName + ": TechnicalWorldGenerator could not find stone type for "
                + world.getBiomeGenForCoords(x, z).biomeName + " (id " + id + "). Please report this to the mod author(s)");
    }
    for (int y = 0; y < world.getActualHeight(); y++) {
        if (world.getBlock(x, y, z) == Blocks.stone) {
            world.setBlock(x, y, z, block, 0, 0);
        }
    }
}
private static void syncConfig(boolean load) {
    List<String> propOrder = new ArrayList<String>();
    try {
        Property prop = null;
        if (!config.isChild) {
            if (load) {
                config.load();
            }
        }
        biomeIDSpace = getIntegerConfigNode(config, prop, propOrder, Constants.CONFIG_CATEGORY_DIMENSIONS,
                "biomeIDSpace", "Biome ID for Space.", 100);
        config.setCategoryPropertyOrder(CATEGORY_GENERAL, propOrder);
        if (config.hasChanged()) {
            config.save();
        }
    } catch (final Exception ex) {
        FMLLog.log(Level.ERROR, ex, "Trappist-1 has a problem loading its config, this can have negative repercussions.");
    }
}
public static void main(String[] args) {
    final LoggerContext ctx = (LoggerContext) LogManager.getContext(false);
    final Configuration config = ctx.getConfiguration();
    config.getLoggerConfig(strategy.Portfolio.class.getName()).setLevel(Level.WARN);
    ctx.updateLoggers(config);

    final CommonParam cp = ParamManager.getCommonParam("i", TIME_FRAME.MIN15, "20080101 000000", "20160101 170000");
    StrategyOptimizer so = new StrategyOptimizer(tester.RealStrategyTester.class);
    so.setInstrumentParam(cp.instrument, cp.tf);
    so.setTestDateRange((int) DateTimeHelper.Ldt2Long(cp.start_date), (int) DateTimeHelper.Ldt2Long(cp.end_date));

    int num = so.setStrategyParamRange(MaPsarStrategy.class,
            new Integer[]{12, 500, 2},
            new String[]{MA.MODE_EMA, MA.MODE_SMMA},
            new APPLIED_PRICE[]{APPLIED_PRICE.PRICE_CLOSE, APPLIED_PRICE.PRICE_TYPICAL},
            new Float[]{0.01f, 0.02f, 0.01f},
            new Float[]{0.1f, 0.2f, 0.02f});
    System.out.println(num);
    so.StartOptimization();

    Set<Entry<Object[], Performances>> entryset = so.result_db.entrySet();
    for (Entry<Object[], Performances> entry : entryset) {
        for (Object obj : entry.getKey()) {
            System.out.print(obj + ",\t");
        }
        System.out.println("ProfitRatio: " + String.format("%.5f", entry.getValue().ProfitRatio)
                + "\tMaxDrawDown: " + entry.getValue().MaxDrawDown);
    }
}
public void updateEntity() {
    FMLLog.log(Level.INFO, "A");
    if (!worldObj.isRemote) {
        useItemToGetEnergy();
        if (canCraft()) {
            FMLLog.log(Level.INFO, "C");
            progress++;
            currentEnergy--;
            if (progress >= totalTime) {
                FMLLog.log(Level.INFO, "D");
                craftItem();
            }
        }
    }
    FMLLog.log(Level.INFO, "N");
    this.markDirty();
}
public static boolean overwriteAllText(File file, String text) {
    if (file.exists()) {
        if (!file.delete()) {
            LogUtil.log(Level.ERROR, "Could not delete file to be overwritten: " + file.getPath());
            return false;
        }
    }
    try {
        Files.write(file.toPath(), text.getBytes(), StandardOpenOption.CREATE);
        return true;
    } catch (IOException e) {
        LogUtil.log(Level.ERROR, "IOException occurred saving " + file.getPath());
        LogUtil.log("");
        LogUtil.log(Level.ERROR, e.toString());
        return false;
    }
}
@Override
protected InputStream getInputStreamByName(String resourceName) throws IOException {
    try {
        return super.getInputStreamByName(resourceName);
    } catch (IOException ioe) {
        if ("pack.mcmeta".equals(resourceName)) {
            FMLLog.log(container.getName(), Level.DEBUG,
                    "Mod %s is missing a pack.mcmeta file, substituting a dummy one", container.getName());
            return new ByteArrayInputStream(("{\n" +
                    " \"pack\": {\n" +
                    " \"description\": \"dummy FML pack for " + container.getName() + "\",\n" +
                    " \"pack_format\": 1\n" +
                    "}\n" +
                    "}").getBytes(Charsets.UTF_8));
        } else {
            throw ioe;
        }
    }
}
public void preInit(FMLPreInitializationEvent event) {
    File directory = event.getModConfigurationDirectory();
    config = new Configuration(new File(directory.getPath(), "adventurers_toolbox.cfg"));
    Config.readConfig();

    MinecraftForge.EVENT_BUS.register(new HandpickHarvestHandler());
    MinecraftForge.EVENT_BUS.register(new SpecialToolAbilityHandler());
    MinecraftForge.EVENT_BUS.register(new HammerHandler());
    MinecraftForge.EVENT_BUS.register(new WeaponHandler());
    MinecraftForge.EVENT_BUS.register(new WorldHandler());

    ModMaterials.init();
    Toolbox.logger.log(Level.INFO, "Initialized tool part materials with "
            + Materials.head_registry.size() + " head materials, "
            + Materials.haft_registry.size() + " haft materials, "
            + Materials.handle_registry.size() + " handle materials, and "
            + Materials.adornment_registry.size() + " adornment materials");

    ModEntities.init();

    if (Loader.isModLoaded("tconstruct") && Config.ENABLE_TINKERS_COMPAT) {
        TConstructCompat.preInit();
    }
}
public static void fillAlwaysBreakable() {
    alwaysBreakable = new ArrayList<>();
    for (String s : alwaysBreakableStrings) {
        Block b = RegistryUtil.getBlockFromRegistry(s);
        if (b == null) {
            LogUtil.log(Level.ERROR, "No block found matching '" + s + "'.");
        } else {
            alwaysBreakable.add(b);
        }
    }
}
public EntityPlayer getPlayerFromMessageContext(MessageContext ctx) {
    switch (ctx.side) {
        case CLIENT:
            LogUtil.log(Level.ERROR, "Message for CLIENT received for dedicated server");
            return null;
        case SERVER:
            return ctx.getServerHandler().player;
        default:
            LogUtil.log(Level.ERROR, "Invalid side in PacketHandler: " + ctx.side);
            return null;
    }
}
public boolean allowedByTier(ItemStack[] itemStacks, Tier tier) {
    Iterator<Entry<ItemStack[], Tier>> iterator = tierList.entrySet().iterator();
    Entry<ItemStack[], Tier> entry;
    do {
        if (!iterator.hasNext()) {
            return false;
        }
        entry = (Entry<ItemStack[], Tier>) iterator.next();
    } while (!canBeSmelted(itemStacks, (ItemStack[]) entry.getKey()));

    ItemStack[] input = new ItemStack[3];
    for (int i = 0; i < 3; i++) {
        if (itemStacks[i] != null) {
            input[i] = itemStacks[i].copy();
            input[i].stackSize = ((ItemStack[]) entry.getKey())[i].stackSize;
        }
    }
    if (debug) FMLLog.log(Level.INFO, "Q " + itemStacks.length);
    if (!tier.isTearUnder(entry.getValue())) {
        if (debug) FMLLog.log(Level.INFO, "A " + tier + " " + entry.getValue());
        return false;
    }
    return true;
}
private static String writeAppenderFilterConfigFile(final File configFile, final Level level) throws IOException {
    final String xml = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>"
            + "<Configuration monitorInterval=\"5\">"
            + "<Appenders>"
            + "<Console name=\"STDOUT\" target=\"SYSTEM_OUT\">"
            + "<filters>"
            + "<MarkerFilter marker=\"" + ENABLED_MARKER_NAME + "\" onMatch=\"ACCEPT\" onMismatch=\"DENY\"/>"
            + "</filters>"
            + "</Console>"
            + "</Appenders>"
            + "<Loggers>"
            + "<Logger name=\"" + TEST_LOGGER_NAME + "\" level=\"" + level.name() + "\" additivity=\"true\">"
            + "</Logger>"
            + "<Root level=\"FATAL\">"
            + "<AppenderRef ref=\"STDOUT\"/>"
            + "</Root>"
            + "</Loggers>"
            + "</Configuration>";
    final BufferedWriter writer = new BufferedWriter(new FileWriter(configFile));
    writer.write(xml);
    writer.close();
    return xml;
}
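/*
 * Sketch only, not part of the original test: one way the XML file written above could be fed to
 * Log4j2 programmatically. ConfigurationSource, Configurator and LoggerContext are standard
 * log4j-core classes; this helper method and its name are assumptions for illustration.
 */
private static LoggerContext loadConfigFile(final File configFile) throws IOException {
    // Requires java.io.FileInputStream and the org.apache.logging.log4j.core.config classes.
    try (InputStream in = new FileInputStream(configFile)) {
        // Wrap the generated file in a ConfigurationSource and (re)initialize a context with it.
        ConfigurationSource source = new ConfigurationSource(in);
        return Configurator.initialize(null, source);
    }
}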
@Override
public final void load() {
    ClientAPI.LOGGER.log(Level.INFO, "Loading Modules");

    // Load Modules
    this.addAll(
            new Aura(),
            new Camera(),
            new Fly(),
            new Hud(),
            new Speed()
    );
}
/**
 * Restores STDOUT ConsoleAppender to ROOT logger. Only called when using the log4j2-default.xml
 * configuration. This is done when the LogWriterAppender for log-file is destroyed. The Appender
 * instance stored in stdoutAppender is used.
 */
public static synchronized void restoreConsoleAppender() {
    if (stdoutAppender == null) {
        return;
    }
    final AppenderContext appenderContext = LogService.getAppenderContext(LogService.ROOT_LOGGER_NAME);
    final LoggerConfig config = appenderContext.getLoggerConfig();
    Appender stdout = config.getAppenders().get(STDOUT);
    if (stdout == null) {
        config.addAppender(stdoutAppender, Level.ALL, null);
        appenderContext.getLoggerContext().updateLoggers();
    }
}
/**
 * Set the log level for the logger on another node
 *
 * @param p_logLevel
 *         Log level to set.
 * @param p_nodeId
 *         Id of the node to change the log level on
 */
private void setLogLevel(final Level p_logLevel, final Short p_nodeId) {
    if (m_boot.getNodeID() == p_nodeId) {
        setLogLevel(p_logLevel);
    } else {
        SetLogLevelMessage message = new SetLogLevelMessage(p_nodeId, p_logLevel.name());
        try {
            m_network.sendMessage(message);
        } catch (final NetworkException e) {
            // #if LOGGER >= ERROR
            LOGGER.error("Setting log level of node 0x%X failed: %s", p_nodeId, e);
            // #endif /* LOGGER >= ERROR */
        }
    }
}
public static void readConfig() {
    Configuration cfg = CommonProxy.config;
    try {
        cfg.load();
        initGeneralConfig(cfg);
    } catch (Exception exception) {
        TitleChanger.logger.log(Level.ERROR, "Problem loading config file!", exception);
    } finally {
        if (cfg.hasChanged()) {
            cfg.save();
        }
    }
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
    FMLLog.log(Level.ERROR, cause, "EntitySpawnHandler exception");
    super.exceptionCaught(ctx, cause);
}
/**
 * When the user changes the log level, a new {@link Runnable} command is added to {@link Settings}.
 * <p>
 * The command runs when the user applies the change in settings.
 *
 * @param event action event
 */
@FXML
public void onLogLevelChanged(final ActionEvent event) {
    settings.addRunnable(() -> {
        final String logLevel = choiceBox.getSelectionModel().getSelectedItem();
        final Logger logger = LogManager.getRootLogger();
        Configurator.setLevel(logger.getName(), Level.toLevel(logLevel));
        LOGGER.info("Log level was set to: " + Level.toLevel(logLevel));
    });
    event.consume();
}
@Test
public void shouldPreferConfigurationFilePropertyIfSet() throws Exception {
    final File configFile = this.temporaryFolder.newFile(DEFAULT_CONFIG_FILE_NAME);
    final String configFileName = configFile.toURI().toString();
    System.setProperty(ConfigurationFactory.CONFIGURATION_FILE_PROPERTY, configFileName);
    writeConfigFile(configFile, Level.DEBUG);

    LogService.reconfigure();

    assertThat(LogService.isUsingGemFireDefaultConfig()).as(LogService.getConfigInformation())
            .isFalse();
    assertThat(System.getProperty(ConfigurationFactory.CONFIGURATION_FILE_PROPERTY))
            .isEqualTo(configFileName);
    assertThat(LogService.getLogger().getName()).isEqualTo(getClass().getName());
}
public static VersionRange parseRange(String range) {
    try {
        return VersionRange.createFromVersionSpec(range);
    } catch (InvalidVersionSpecificationException e) {
        FMLLog.log(Level.ERROR, e, "Unable to parse a version range specification successfully %s", range);
        throw new LoaderException(e);
    }
}
private static void updateLevel(Level level) {
    LoggerContext ctx = (LoggerContext) LogManager.getContext(false);
    Configuration config = ctx.getConfiguration();
    LoggerConfig loggerConfig = config.getLoggerConfig(LogManager.ROOT_LOGGER_NAME);
    Level curLevel = loggerConfig.getLevel();
    if (curLevel != level) {
        loggerConfig.setLevel(level);
        ctx.updateLoggers();
    }
}
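/*
 * Companion sketch, not from the original source: the same effect can be obtained with
 * org.apache.logging.log4j.core.config.Configurator, which wraps the LoggerConfig plumbing above.
 * Both calls are standard log4j-core API; the logger name below is a placeholder, not a real class.
 */
private static void updateLevelViaConfigurator(Level level) {
    // Change the root logger level, mirroring updateLevel(...) above.
    Configurator.setRootLevel(level);
    // Or target a single named logger instead of the root logger.
    Configurator.setLevel("com.example.SomeNoisyClass", level);
}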
public Object call() throws Exception {
    // Setup for capturing logger messages
    Appender mockAppender = mock(Appender.class);
    when(mockAppender.getName()).thenReturn("MockAppender");
    when(mockAppender.isStarted()).thenReturn(true);
    when(mockAppender.isStopped()).thenReturn(false);
    Logger logger = (Logger) LogManager.getLogger(ColocationLogger.class);
    logger.addAppender(mockAppender);
    logger.setLevel(Level.WARN);
    loggingEventCaptor = ArgumentCaptor.forClass(LogEvent.class);

    // Logger interval may have been hooked by the test, so adjust test delays here
    int logInterval = ColocationLogger.getLogInterval();
    List<LogEvent> logEvents = Collections.emptyList();

    try {
        createPR(PR_REGION_NAME, true);
        createPR("region2", PR_REGION_NAME, true); // This child region is never created
        // Let this thread continue running long enough for the missing region to be logged a
        // couple times.
        // Grandchild region does not get created by this thread. (1.5*logInterval < delay <
        // 2*logInterval)
        await().atMost((int) (1.75 * logInterval), TimeUnit.MILLISECONDS).until(() -> {
            verify(mockAppender, times(numExpectedLogMessages)).append(loggingEventCaptor.capture());
        });
        // createPR("region3", PR_REGION_NAME, true); // This child region is never created
    } finally {
        logEvents = loggingEventCaptor.getAllValues();
        assertEquals(String.format("Expected %d messages to be logged, got %d.",
                numExpectedLogMessages, logEvents.size()), numExpectedLogMessages, logEvents.size());
        String logMsg = logEvents.get(0).getMessage().getFormattedMessage();
        logger.removeAppender(mockAppender);
        numExpectedLogMessages = 1;
        return logMsg;
    }
}
private void readCsv(File file, Map<String, String> map) throws IOException {
    GradleStartCommon.LOGGER.log(Level.DEBUG, "Reading CSV file: {}", file);
    Splitter split = Splitter.on(',').trimResults().limit(3);
    for (String line : Files.readLines(file, Charsets.UTF_8)) {
        if (line.startsWith("searge")) // header line
            continue;
        List<String> splits = split.splitToList(line);
        map.put(splits.get(0), splits.get(1));
    }
}
@Override
public void loadIncludedConfig(FMLInitializationEvent e, List<String> includedJsons) {
    for (String json : includedJsons) {
        JsonAdvancedCraftingHandler handler = BBJsonLoader.deserializeObject(json, JsonAdvancedCraftingHandler.class);
        if (handler == null) {
            LogUtil.log(Level.ERROR, "Could not deserialize included json.");
            continue;
        }
        boolean missingDependencies = false;
        for (String mod : handler.getModDependencies()) {
            if (!Loader.isModLoaded(mod)) {
                LogUtil.log(Level.WARN, "Mod '" + mod + "' missing, skipping all recipes in file.");
                missingDependencies = true;
                break;
            }
        }
        if (missingDependencies) {
            continue;
        }
        includes.add(handler);
        for (JsonAdvancedRecipe r : handler.getRecipes()) {
            r.register();
        }
    }
}
@Override
void toBytes(ByteBuf buf) {
    super.toBytes(buf);
    ByteBufUtils.writeUTF8String(buf, modId);
    buf.writeInt(modEntityTypeId);
    buf.writeLong(entity.getUniqueID().getMostSignificantBits());
    buf.writeLong(entity.getUniqueID().getLeastSignificantBits());
    // posX, posY, posZ
    buf.writeDouble(entity.posX);
    buf.writeDouble(entity.posY);
    buf.writeDouble(entity.posZ);
    // yaw, pitch
    buf.writeByte((byte) (entity.rotationYaw * 256.0F / 360.0F));
    buf.writeByte((byte) (entity.rotationPitch * 256.0F / 360.0F));
    // head yaw
    if (entity instanceof EntityLivingBase) {
        buf.writeByte((byte) (((EntityLivingBase) entity).rotationYawHead * 256.0F / 360.0F));
    } else {
        buf.writeByte(0);
    }
    ByteBuf tmpBuf = Unpooled.buffer();
    PacketBuffer pb = new PacketBuffer(tmpBuf);
    try {
        entity.getDataManager().writeEntries(pb);
    } catch (IOException e) {
        FMLLog.log(Level.FATAL, e, "Encountered fatal exception trying to send entity spawn data watchers");
        throw Throwables.propagate(e);
    }
    buf.writeBytes(tmpBuf);

    if (entity instanceof IThrowableEntity) {
        Entity owner = ((IThrowableEntity) entity).getThrower();
        buf.writeInt(owner == null ? entity.getEntityId() : owner.getEntityId());
        double maxVel = 3.9D;
        double mX = entity.motionX;
        double mY = entity.motionY;
        double mZ = entity.motionZ;
        if (mX < -maxVel) mX = -maxVel;
        if (mY < -maxVel) mY = -maxVel;
        if (mZ < -maxVel) mZ = -maxVel;
        if (mX > maxVel) mX = maxVel;
        if (mY > maxVel) mY = maxVel;
        if (mZ > maxVel) mZ = maxVel;
        buf.writeInt((int) (mX * 8000D));
        buf.writeInt((int) (mY * 8000D));
        buf.writeInt((int) (mZ * 8000D));
    } else {
        buf.writeInt(0);
    }

    if (entity instanceof IEntityAdditionalSpawnData) {
        tmpBuf = Unpooled.buffer();
        ((IEntityAdditionalSpawnData) entity).writeSpawnData(tmpBuf);
        buf.writeBytes(tmpBuf);
    }
}
private void addPotionsAction(AttributeMap map) {
    List<PotionEffect> effects = new ArrayList<>();
    for (String p : map.getList(ACTION_POTION)) {
        String[] splitted = StringUtils.split(p, ',');
        if (splitted == null || splitted.length != 3) {
            InControl.logger.log(Level.ERROR, "Bad potion specifier '" + p + "'! Use <potion>,<duration>,<amplifier>");
            continue;
        }
        Potion potion = ForgeRegistries.POTIONS.getValue(new ResourceLocation(splitted[0]));
        if (potion == null) {
            InControl.logger.log(Level.ERROR, "Can't find potion '" + p + "'!");
            continue;
        }
        int duration = 0;
        int amplifier = 0;
        try {
            duration = Integer.parseInt(splitted[1]);
            amplifier = Integer.parseInt(splitted[2]);
        } catch (NumberFormatException e) {
            InControl.logger.log(Level.ERROR, "Bad duration or amplifier integer for '" + p + "'!");
            continue;
        }
        effects.add(new PotionEffect(potion, duration, amplifier));
    }
    if (!effects.isEmpty()) {
        actions.add(event -> {
            EntityLivingBase living = getHelper(event);
            for (PotionEffect effect : effects) {
                PotionEffect neweffect = new PotionEffect(effect.getPotion(), effect.getDuration(), effect.getAmplifier());
                living.addPotionEffect(neweffect);
            }
        });
    }
}
public TestDatarouterParentLog4j2Configuration() throws IllegalAccessException {
    addFilter(RegexFilter.createFilter(".*password.*", null, true, Result.DENY, Result.NEUTRAL));
    FileAppender testFileAppender = Log4j2Configurator.createFileAppender(TEST_APPENDER_NAME, TEST_FILE_NAME,
            defaultPattern);
    addAppender(testFileAppender);
    addLoggerConfig(getClass().getPackage().getName(), Level.ALL, false, testFileAppender);
}
@Subscribe
public void buildModList(FMLLoadEvent event) {
    Builder<String, EventBus> eventBus = ImmutableMap.builder();
    for (final ModContainer mod : loader.getModList()) {
        // Create mod logger, and make the EventBus logger a child of it.
        EventBus bus = new EventBus(new SubscriberExceptionHandler() {
            @Override
            public void handleException(final Throwable exception, final SubscriberExceptionContext context) {
                LoadController.this.errorOccurred(mod, exception);
            }
        });
        boolean isActive = mod.registerBus(bus, this);
        if (isActive) {
            activeModList.add(mod);
            modStates.put(mod.getModId(), ModState.UNLOADED);
            eventBus.put(mod.getModId(), bus);
            FMLCommonHandler.instance().addModToResourcePack(mod);
        } else {
            FMLLog.log(mod.getModId(), Level.WARN, "Mod %s has been disabled through configuration", mod.getModId());
            modStates.put(mod.getModId(), ModState.UNLOADED);
            modStates.put(mod.getModId(), ModState.DISABLED);
        }
        modNames.put(mod.getModId(), mod.getName());
    }
    eventChannels = eventBus.build();
}
private List<Item> getItems(List<String> itemNames) {
    List<Item> items = new ArrayList<>();
    for (String name : itemNames) {
        Item item = ForgeRegistries.ITEMS.getValue(new ResourceLocation(name));
        if (item == null) {
            InControl.logger.log(Level.ERROR, "Unknown item '" + name + "'!");
        } else {
            items.add(item);
        }
    }
    return items;
}
@Override
public void loadIncludedConfig(FMLInitializationEvent e, List<String> includedJsons) {
    for (String json : includedJsons) {
        JsonRepairInfusionHandler handler = BBJsonLoader.deserializeObject(json, JsonRepairInfusionHandler.class);
        if (handler == null) {
            LogUtil.log(Level.ERROR, "Could not deserialize included json.");
            continue;
        }
        boolean missingDependencies = false;
        for (String mod : handler.getModDependencies()) {
            if (!Loader.isModLoaded(mod)) {
                LogUtil.log(Level.WARN, "Mod '" + mod + "' missing, skipping all recipes in file.");
                missingDependencies = true;
                break;
            }
        }
        if (missingDependencies) {
            continue;
        }
        includes.add(handler);
        for (JsonRepairInfusionAssociation r : handler.getEnchantmentAssociations()) {
            r.register();
        }
    }
}
private ClassPatch readPatch(JarEntry patchEntry, JarInputStream jis) {
    if (DEBUG) FMLRelaunchLog.finer("Reading patch data from %s", patchEntry.getName());
    ByteArrayDataInput input;
    try {
        input = ByteStreams.newDataInput(ByteStreams.toByteArray(jis));
    } catch (IOException e) {
        FMLRelaunchLog.log(Level.WARN, e, "Unable to read binpatch file %s - ignoring", patchEntry.getName());
        return null;
    }
    String name = input.readUTF();
    String sourceClassName = input.readUTF();
    String targetClassName = input.readUTF();
    boolean exists = input.readBoolean();
    int inputChecksum = 0;
    if (exists) {
        inputChecksum = input.readInt();
    }
    int patchLength = input.readInt();
    byte[] patchBytes = new byte[patchLength];
    input.readFully(patchBytes);
    return new ClassPatch(name, sourceClassName, targetClassName, exists, inputChecksum, patchBytes);
}
private static void makeEmptyRuleFile(File file) {
    PrintWriter writer;
    try {
        writer = new PrintWriter(file);
    } catch (FileNotFoundException e) {
        InControl.logger.log(Level.ERROR, "Error writing " + file.getName() + "!");
        return;
    }
    JsonArray array = new JsonArray();
    Gson gson = new GsonBuilder().setPrettyPrinting().create();
    writer.print(gson.toJson(array));
    writer.close();
}
/**
 * Debug function
 *
 * @param dimensionID
 */
public static void printNBTTagContentsForDimension(int dimensionID) {
    for (VillagerData data : villagerDataList) {
        if (data.getDimensionID() == dimensionID) {
            LogManager.getLogger().log(Level.WARN, "VillagerData for dimension " + dimensionID + " contains the following data:");
            for (String key : data.getData().getKeySet()) {
                LogManager.getLogger().log(Level.WARN, "Key: " + key + " = " + data.getData().getInteger(key));
            }
        }
    }
}
public boolean craftItem() {
    InventoryCrafting inv = new InventoryCrafting(new ContainerAutoWorkBench(null, this), 3, 3);
    for (int i = 1; i < machineItemStacks.length - 1; i++) {
        inv.setInventorySlotContents(i - 1, machineItemStacks[i - 1]);
        FMLLog.log(Level.INFO, "E" + (i - 1));
    }
    ItemStack result = AutoWorkBenchRecipes.smelting().getSmeltingResult(inv, worldObj, Tier.normal)[0];
    if (canCraft() && result != null && (machineItemStacks[10] == null || result.isItemEqual(machineItemStacks[10]))) {
        FMLLog.log(Level.INFO, "F");
        if (machineItemStacks[10] == null) {
            FMLLog.log(Level.INFO, "G");
            machineItemStacks[10] = result;
        } else {
            FMLLog.log(Level.INFO, "H");
            machineItemStacks[10].stackSize++;
        }
        for (int i = 1; i < machineItemStacks.length; i++) {
            FMLLog.log(Level.INFO, "I" + i); // was "I" + 1, which always logged "I1"; the loop index appears intended
            if (machineItemStacks[i] != null) {
                FMLLog.log(Level.INFO, "J");
                machineItemStacks[i].stackSize--;
                if (machineItemStacks[i].stackSize <= 0) {
                    FMLLog.log(Level.INFO, "K");
                    machineItemStacks[i] = null;
                }
            }
        }
        FMLLog.log(Level.INFO, "L");
        return true;
    }
    FMLLog.log(Level.INFO, "M");
    return false;
}